commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d18703237300f0e6b7d2a1ca88fbfa884e77c1b5
|
partner_event/models/res_partner.py
|
partner_event/models/res_partner.py
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_attended_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
@api.one
@api.depends('registrations')
def _count_attended_registration(self):
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
|
Use only one method to recalculate event counters
|
Use only one method to recalculate event counters
|
Python
|
agpl-3.0
|
open-synergy/event,Endika/event,Antiun/event,open-synergy/event
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_attended_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
@api.one
@api.depends('registrations')
def _count_attended_registration(self):
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
Use only one method to recalculate event counters
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_attended_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
@api.one
@api.depends('registrations')
def _count_attended_registration(self):
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
<commit_msg>Use only one method to recalculate event counters<commit_after>
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
|
# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_attended_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
@api.one
@api.depends('registrations')
def _count_attended_registration(self):
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
Use only one method to recalculate event counters# -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
|
<commit_before># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_attended_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
@api.one
@api.depends('registrations')
def _count_attended_registration(self):
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
<commit_msg>Use only one method to recalculate event counters<commit_after># -*- encoding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
_inherit = 'res.partner'
registrations = fields.One2many(
string="Event registrations",
comodel_name='event.registration', inverse_name="partner_id")
registration_count = fields.Integer(
string='Event registrations number', compute='_count_registration',
store=True)
attended_registration_count = fields.Integer(
string='Event attended registrations number',
compute='_count_registration', store=True)
@api.one
@api.depends('registrations')
def _count_registration(self):
self.registration_count = len(self.registrations)
self.attended_registration_count = len(self.registrations.filtered(
lambda x: x.state == 'done'))
|
586259997aba2a18439ccf3b038ca6d49dd0a7dc
|
setup.py
|
setup.py
|
#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as file:
return file.read().strip()
setup(
name='django-armet',
version='0.2.0-pre',
description='Clean and modern framework in django for creating RESTful APIs.',
long_description=read('README.md'),
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/django-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
# TODO: Grep this from the appropriate requirements files.
install_requires=(
'six',
'django',
'python-mimeparse',
'python-dateutil',
'parsedatetime',
'PyYAML',
'lxml',
'python-magic',),
)
|
#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as file:
return file.read().strip()
setup(
name='django-armet',
version='0.2.0-pre',
description='Clean and modern framework in django for creating RESTful APIs.',
long_description=read('README.md'),
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/django-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
# TODO: Grep this from the appropriate requirements files.
install_requires=(
'six',
'django',
'python-mimeparse',
'python-dateutil',
'parsedatetime',
'PyYAML',
'lxml',
'python-magic',
'msgpack-python'),
)
|
Add messagepack to the list of dependencies, since runtime wont run without it.
|
Add messagepack to the list of dependencies, since runtime wont run without it.
|
Python
|
mit
|
armet/python-armet
|
#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as file:
return file.read().strip()
setup(
name='django-armet',
version='0.2.0-pre',
description='Clean and modern framework in django for creating RESTful APIs.',
long_description=read('README.md'),
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/django-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
# TODO: Grep this from the appropriate requirements files.
install_requires=(
'six',
'django',
'python-mimeparse',
'python-dateutil',
'parsedatetime',
'PyYAML',
'lxml',
'python-magic',),
)
Add messagepack to the list of dependencies, since runtime wont run without it.
|
#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as file:
return file.read().strip()
setup(
name='django-armet',
version='0.2.0-pre',
description='Clean and modern framework in django for creating RESTful APIs.',
long_description=read('README.md'),
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/django-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
# TODO: Grep this from the appropriate requirements files.
install_requires=(
'six',
'django',
'python-mimeparse',
'python-dateutil',
'parsedatetime',
'PyYAML',
'lxml',
'python-magic',
'msgpack-python'),
)
|
<commit_before>#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as file:
return file.read().strip()
setup(
name='django-armet',
version='0.2.0-pre',
description='Clean and modern framework in django for creating RESTful APIs.',
long_description=read('README.md'),
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/django-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
# TODO: Grep this from the appropriate requirements files.
install_requires=(
'six',
'django',
'python-mimeparse',
'python-dateutil',
'parsedatetime',
'PyYAML',
'lxml',
'python-magic',),
)
<commit_msg>Add messagepack to the list of dependencies, since runtime wont run without it.<commit_after>
|
#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as file:
return file.read().strip()
setup(
name='django-armet',
version='0.2.0-pre',
description='Clean and modern framework in django for creating RESTful APIs.',
long_description=read('README.md'),
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/django-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
# TODO: Grep this from the appropriate requirements files.
install_requires=(
'six',
'django',
'python-mimeparse',
'python-dateutil',
'parsedatetime',
'PyYAML',
'lxml',
'python-magic',
'msgpack-python'),
)
|
#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as file:
return file.read().strip()
setup(
name='django-armet',
version='0.2.0-pre',
description='Clean and modern framework in django for creating RESTful APIs.',
long_description=read('README.md'),
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/django-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
# TODO: Grep this from the appropriate requirements files.
install_requires=(
'six',
'django',
'python-mimeparse',
'python-dateutil',
'parsedatetime',
'PyYAML',
'lxml',
'python-magic',),
)
Add messagepack to the list of dependencies, since runtime wont run without it.#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as file:
return file.read().strip()
setup(
name='django-armet',
version='0.2.0-pre',
description='Clean and modern framework in django for creating RESTful APIs.',
long_description=read('README.md'),
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/django-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
# TODO: Grep this from the appropriate requirements files.
install_requires=(
'six',
'django',
'python-mimeparse',
'python-dateutil',
'parsedatetime',
'PyYAML',
'lxml',
'python-magic',
'msgpack-python'),
)
|
<commit_before>#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as file:
return file.read().strip()
setup(
name='django-armet',
version='0.2.0-pre',
description='Clean and modern framework in django for creating RESTful APIs.',
long_description=read('README.md'),
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/django-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
# TODO: Grep this from the appropriate requirements files.
install_requires=(
'six',
'django',
'python-mimeparse',
'python-dateutil',
'parsedatetime',
'PyYAML',
'lxml',
'python-magic',),
)
<commit_msg>Add messagepack to the list of dependencies, since runtime wont run without it.<commit_after>#! /usr/bin/env python
import os
from setuptools import setup, find_packages
def read(filename):
with open(os.path.join(os.path.dirname(__file__), filename)) as file:
return file.read().strip()
setup(
name='django-armet',
version='0.2.0-pre',
description='Clean and modern framework in django for creating RESTful APIs.',
long_description=read('README.md'),
author='Concordus Applications',
author_email='support@concordusapps.com',
url='http://github.com/armet/django-armet',
package_dir={'armet': 'src/armet'},
packages=find_packages('src'),
# TODO: Grep this from the appropriate requirements files.
install_requires=(
'six',
'django',
'python-mimeparse',
'python-dateutil',
'parsedatetime',
'PyYAML',
'lxml',
'python-magic',
'msgpack-python'),
)
|
30f8be511a35b5951258ffba917a52d657c1e1ed
|
setup.py
|
setup.py
|
import setuptools
setuptools.setup(
name="nbresuse",
version='0.1.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
]
)
|
import setuptools
from glob import glob
setuptools.setup(
name="nbresuse",
version='0.1.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
],
data_files=[
('share/jupyter/nbextensions/nbresuse', glob('nbresuse/static/*'))
]
)
|
Add static files to data_files
|
Add static files to data_files
|
Python
|
bsd-2-clause
|
yuvipanda/nbresuse,allanlwu/allangdrive,yuvipanda/nbresuse,allanlwu/allangdrive
|
import setuptools
setuptools.setup(
name="nbresuse",
version='0.1.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
]
)
Add static files to data_files
|
import setuptools
from glob import glob
setuptools.setup(
name="nbresuse",
version='0.1.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
],
data_files=[
('share/jupyter/nbextensions/nbresuse', glob('nbresuse/static/*'))
]
)
|
<commit_before>import setuptools
setuptools.setup(
name="nbresuse",
version='0.1.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
]
)
<commit_msg>Add static files to data_files<commit_after>
|
import setuptools
from glob import glob
setuptools.setup(
name="nbresuse",
version='0.1.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
],
data_files=[
('share/jupyter/nbextensions/nbresuse', glob('nbresuse/static/*'))
]
)
|
import setuptools
setuptools.setup(
name="nbresuse",
version='0.1.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
]
)
Add static files to data_filesimport setuptools
from glob import glob
setuptools.setup(
name="nbresuse",
version='0.1.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
],
data_files=[
('share/jupyter/nbextensions/nbresuse', glob('nbresuse/static/*'))
]
)
|
<commit_before>import setuptools
setuptools.setup(
name="nbresuse",
version='0.1.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
]
)
<commit_msg>Add static files to data_files<commit_after>import setuptools
from glob import glob
setuptools.setup(
name="nbresuse",
version='0.1.0',
url="https://github.com/yuvipanda/nbresuse",
author="Yuvi Panda",
description="Simple Jupyter extension to show how much resources (RAM) your notebook is using",
packages=setuptools.find_packages(),
install_requires=[
'psutil',
'notebook',
],
data_files=[
('share/jupyter/nbextensions/nbresuse', glob('nbresuse/static/*'))
]
)
|
8b873e2bd6e4fbbf0464380cd799c9513b26386c
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
from channels import __version__
setup(
name='channels',
version=__version__,
url='http://github.com/django/channels',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description="Brings event-driven capabilities to Django with a channel system. Django 1.8 and up only.",
license='BSD',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=[
'Django>=1.8',
'asgiref~=1.1',
'daphne~=1.3',
],
extras_require={
'tests': [
'coverage',
'mock ; python_version < "3.0"',
'flake8>=2.0,<3.0',
'isort',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
],
)
|
from setuptools import find_packages, setup
from channels import __version__
setup(
name='channels',
version=__version__,
url='http://github.com/django/channels',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description="Brings event-driven capabilities to Django with a channel system. Django 1.8 and up only.",
license='BSD',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=[
'Django>=1.8',
'asgiref~=1.1',
'daphne~=1.3',
],
extras_require={
'tests': [
'coverage',
'flake8>=2.0,<3.0',
'isort',
],
'tests:python_version < "3.0"': ['mock'],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
],
)
|
Use correct environment markers syntax in the extras_require section.
|
Use correct environment markers syntax in the extras_require section.
See https://github.com/pypa/setuptools/issues/1087
|
Python
|
bsd-3-clause
|
andrewgodwin/channels,django/channels,andrewgodwin/django-channels
|
from setuptools import find_packages, setup
from channels import __version__
setup(
name='channels',
version=__version__,
url='http://github.com/django/channels',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description="Brings event-driven capabilities to Django with a channel system. Django 1.8 and up only.",
license='BSD',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=[
'Django>=1.8',
'asgiref~=1.1',
'daphne~=1.3',
],
extras_require={
'tests': [
'coverage',
'mock ; python_version < "3.0"',
'flake8>=2.0,<3.0',
'isort',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
],
)
Use correct environment markers syntax in the extras_require section.
See https://github.com/pypa/setuptools/issues/1087
|
from setuptools import find_packages, setup
from channels import __version__
setup(
name='channels',
version=__version__,
url='http://github.com/django/channels',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description="Brings event-driven capabilities to Django with a channel system. Django 1.8 and up only.",
license='BSD',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=[
'Django>=1.8',
'asgiref~=1.1',
'daphne~=1.3',
],
extras_require={
'tests': [
'coverage',
'flake8>=2.0,<3.0',
'isort',
],
'tests:python_version < "3.0"': ['mock'],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
],
)
|
<commit_before>from setuptools import find_packages, setup
from channels import __version__
setup(
name='channels',
version=__version__,
url='http://github.com/django/channels',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description="Brings event-driven capabilities to Django with a channel system. Django 1.8 and up only.",
license='BSD',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=[
'Django>=1.8',
'asgiref~=1.1',
'daphne~=1.3',
],
extras_require={
'tests': [
'coverage',
'mock ; python_version < "3.0"',
'flake8>=2.0,<3.0',
'isort',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
],
)
<commit_msg>Use correct environment markers syntax in the extras_require section.
See https://github.com/pypa/setuptools/issues/1087<commit_after>
|
from setuptools import find_packages, setup
from channels import __version__
setup(
name='channels',
version=__version__,
url='http://github.com/django/channels',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description="Brings event-driven capabilities to Django with a channel system. Django 1.8 and up only.",
license='BSD',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=[
'Django>=1.8',
'asgiref~=1.1',
'daphne~=1.3',
],
extras_require={
'tests': [
'coverage',
'flake8>=2.0,<3.0',
'isort',
],
'tests:python_version < "3.0"': ['mock'],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
],
)
|
from setuptools import find_packages, setup
from channels import __version__
setup(
name='channels',
version=__version__,
url='http://github.com/django/channels',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description="Brings event-driven capabilities to Django with a channel system. Django 1.8 and up only.",
license='BSD',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=[
'Django>=1.8',
'asgiref~=1.1',
'daphne~=1.3',
],
extras_require={
'tests': [
'coverage',
'mock ; python_version < "3.0"',
'flake8>=2.0,<3.0',
'isort',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
],
)
Use correct environment markers syntax in the extras_require section.
See https://github.com/pypa/setuptools/issues/1087from setuptools import find_packages, setup
from channels import __version__
setup(
name='channels',
version=__version__,
url='http://github.com/django/channels',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description="Brings event-driven capabilities to Django with a channel system. Django 1.8 and up only.",
license='BSD',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=[
'Django>=1.8',
'asgiref~=1.1',
'daphne~=1.3',
],
extras_require={
'tests': [
'coverage',
'flake8>=2.0,<3.0',
'isort',
],
'tests:python_version < "3.0"': ['mock'],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
],
)
|
<commit_before>from setuptools import find_packages, setup
from channels import __version__
setup(
name='channels',
version=__version__,
url='http://github.com/django/channels',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description="Brings event-driven capabilities to Django with a channel system. Django 1.8 and up only.",
license='BSD',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=[
'Django>=1.8',
'asgiref~=1.1',
'daphne~=1.3',
],
extras_require={
'tests': [
'coverage',
'mock ; python_version < "3.0"',
'flake8>=2.0,<3.0',
'isort',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
],
)
<commit_msg>Use correct environment markers syntax in the extras_require section.
See https://github.com/pypa/setuptools/issues/1087<commit_after>from setuptools import find_packages, setup
from channels import __version__
setup(
name='channels',
version=__version__,
url='http://github.com/django/channels',
author='Django Software Foundation',
author_email='foundation@djangoproject.com',
description="Brings event-driven capabilities to Django with a channel system. Django 1.8 and up only.",
license='BSD',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=[
'Django>=1.8',
'asgiref~=1.1',
'daphne~=1.3',
],
extras_require={
'tests': [
'coverage',
'flake8>=2.0,<3.0',
'isort',
],
'tests:python_version < "3.0"': ['mock'],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
],
)
|
0447f10754467babb3096ed10dbfc81f8b67d66e
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='sspps',
version='0.1.1',
description='Super Simple Python Plugin Sytem',
author='Gregory Haynes',
author_email='greg@greghaynes.net',
url='http://github.com/greghaynes/SSPPS',
license='MIT',
packages=['sspps'],
)
|
from distutils.core import setup
setup(name='sspps',
version='0.1.2',
description='Super Simple Python Plugin Sytem',
author='Gregory Haynes',
author_email='greg@greghaynes.net',
url='http://github.com/greghaynes/SSPPS',
license='MIT',
packages=['sspps'],
)
|
Bump version to push homepage fix and readme comments
|
Bump version to push homepage fix and readme comments
|
Python
|
mit
|
greghaynes/SSPPS
|
from distutils.core import setup
setup(name='sspps',
version='0.1.1',
description='Super Simple Python Plugin Sytem',
author='Gregory Haynes',
author_email='greg@greghaynes.net',
url='http://github.com/greghaynes/SSPPS',
license='MIT',
packages=['sspps'],
)
Bump version to push homepage fix and readme comments
|
from distutils.core import setup
setup(name='sspps',
version='0.1.2',
description='Super Simple Python Plugin Sytem',
author='Gregory Haynes',
author_email='greg@greghaynes.net',
url='http://github.com/greghaynes/SSPPS',
license='MIT',
packages=['sspps'],
)
|
<commit_before>from distutils.core import setup
setup(name='sspps',
version='0.1.1',
description='Super Simple Python Plugin Sytem',
author='Gregory Haynes',
author_email='greg@greghaynes.net',
url='http://github.com/greghaynes/SSPPS',
license='MIT',
packages=['sspps'],
)
<commit_msg>Bump version to push homepage fix and readme comments<commit_after>
|
from distutils.core import setup
setup(name='sspps',
version='0.1.2',
description='Super Simple Python Plugin Sytem',
author='Gregory Haynes',
author_email='greg@greghaynes.net',
url='http://github.com/greghaynes/SSPPS',
license='MIT',
packages=['sspps'],
)
|
from distutils.core import setup
setup(name='sspps',
version='0.1.1',
description='Super Simple Python Plugin Sytem',
author='Gregory Haynes',
author_email='greg@greghaynes.net',
url='http://github.com/greghaynes/SSPPS',
license='MIT',
packages=['sspps'],
)
Bump version to push homepage fix and readme commentsfrom distutils.core import setup
setup(name='sspps',
version='0.1.2',
description='Super Simple Python Plugin Sytem',
author='Gregory Haynes',
author_email='greg@greghaynes.net',
url='http://github.com/greghaynes/SSPPS',
license='MIT',
packages=['sspps'],
)
|
<commit_before>from distutils.core import setup
setup(name='sspps',
version='0.1.1',
description='Super Simple Python Plugin Sytem',
author='Gregory Haynes',
author_email='greg@greghaynes.net',
url='http://github.com/greghaynes/SSPPS',
license='MIT',
packages=['sspps'],
)
<commit_msg>Bump version to push homepage fix and readme comments<commit_after>from distutils.core import setup
setup(name='sspps',
version='0.1.2',
description='Super Simple Python Plugin Sytem',
author='Gregory Haynes',
author_email='greg@greghaynes.net',
url='http://github.com/greghaynes/SSPPS',
license='MIT',
packages=['sspps'],
)
|
f23e4751a4ec4fef6ee5a1a313239fe3788d8a4b
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
packages = [
'connman_dispatcher'
]
requires = [
'pyee >= 0.0.9'
]
setup(
name='connman-dispatcher',
version='0.0.6',
description='Call scripts on network changes',
long_description=open('README.md').read(),
author='Alexandr Skurikhin',
author_email='a.skurikhin@gmail.com',
url='http://github.com/a-sk/connman-dispatcher',
scripts=['bin/connman-dispatcher'],
packages=packages,
package_data={'': ['LICENSE']},
install_requires=requires,
license=open('LICENSE').read(),
)
del os.environ['PYTHONDONTWRITEBYTECODE']
|
#!/usr/bin/env python2
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
packages = [
'connman_dispatcher'
]
requires = [
'pyee >= 0.0.9',
'Logbook == 0.6.0'
]
setup(
name='connman-dispatcher',
version='0.0.6',
description='Call scripts on network changes',
long_description=open('README.md').read(),
author='Alexandr Skurikhin',
author_email='a.skurikhin@gmail.com',
url='http://github.com/a-sk/connman-dispatcher',
scripts=['bin/connman-dispatcher'],
packages=packages,
package_data={'': ['LICENSE']},
install_requires=requires,
license=open('LICENSE').read(),
)
del os.environ['PYTHONDONTWRITEBYTECODE']
|
Add logbook 0.6.0 as a dependency
|
Add logbook 0.6.0 as a dependency
|
Python
|
isc
|
a-sk/connman-dispatcher
|
#!/usr/bin/env python2
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
packages = [
'connman_dispatcher'
]
requires = [
'pyee >= 0.0.9'
]
setup(
name='connman-dispatcher',
version='0.0.6',
description='Call scripts on network changes',
long_description=open('README.md').read(),
author='Alexandr Skurikhin',
author_email='a.skurikhin@gmail.com',
url='http://github.com/a-sk/connman-dispatcher',
scripts=['bin/connman-dispatcher'],
packages=packages,
package_data={'': ['LICENSE']},
install_requires=requires,
license=open('LICENSE').read(),
)
del os.environ['PYTHONDONTWRITEBYTECODE']
Add logbook 0.6.0 as a dependency
|
#!/usr/bin/env python2
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
packages = [
'connman_dispatcher'
]
requires = [
'pyee >= 0.0.9',
'Logbook == 0.6.0'
]
setup(
name='connman-dispatcher',
version='0.0.6',
description='Call scripts on network changes',
long_description=open('README.md').read(),
author='Alexandr Skurikhin',
author_email='a.skurikhin@gmail.com',
url='http://github.com/a-sk/connman-dispatcher',
scripts=['bin/connman-dispatcher'],
packages=packages,
package_data={'': ['LICENSE']},
install_requires=requires,
license=open('LICENSE').read(),
)
del os.environ['PYTHONDONTWRITEBYTECODE']
|
<commit_before>#!/usr/bin/env python2
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
packages = [
'connman_dispatcher'
]
requires = [
'pyee >= 0.0.9'
]
setup(
name='connman-dispatcher',
version='0.0.6',
description='Call scripts on network changes',
long_description=open('README.md').read(),
author='Alexandr Skurikhin',
author_email='a.skurikhin@gmail.com',
url='http://github.com/a-sk/connman-dispatcher',
scripts=['bin/connman-dispatcher'],
packages=packages,
package_data={'': ['LICENSE']},
install_requires=requires,
license=open('LICENSE').read(),
)
del os.environ['PYTHONDONTWRITEBYTECODE']
<commit_msg>Add logbook 0.6.0 as a dependency<commit_after>
|
#!/usr/bin/env python2
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
packages = [
'connman_dispatcher'
]
requires = [
'pyee >= 0.0.9',
'Logbook == 0.6.0'
]
setup(
name='connman-dispatcher',
version='0.0.6',
description='Call scripts on network changes',
long_description=open('README.md').read(),
author='Alexandr Skurikhin',
author_email='a.skurikhin@gmail.com',
url='http://github.com/a-sk/connman-dispatcher',
scripts=['bin/connman-dispatcher'],
packages=packages,
package_data={'': ['LICENSE']},
install_requires=requires,
license=open('LICENSE').read(),
)
del os.environ['PYTHONDONTWRITEBYTECODE']
|
#!/usr/bin/env python2
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
packages = [
'connman_dispatcher'
]
requires = [
'pyee >= 0.0.9'
]
setup(
name='connman-dispatcher',
version='0.0.6',
description='Call scripts on network changes',
long_description=open('README.md').read(),
author='Alexandr Skurikhin',
author_email='a.skurikhin@gmail.com',
url='http://github.com/a-sk/connman-dispatcher',
scripts=['bin/connman-dispatcher'],
packages=packages,
package_data={'': ['LICENSE']},
install_requires=requires,
license=open('LICENSE').read(),
)
del os.environ['PYTHONDONTWRITEBYTECODE']
Add logbook 0.6.0 as a dependency#!/usr/bin/env python2
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
packages = [
'connman_dispatcher'
]
requires = [
'pyee >= 0.0.9',
'Logbook == 0.6.0'
]
setup(
name='connman-dispatcher',
version='0.0.6',
description='Call scripts on network changes',
long_description=open('README.md').read(),
author='Alexandr Skurikhin',
author_email='a.skurikhin@gmail.com',
url='http://github.com/a-sk/connman-dispatcher',
scripts=['bin/connman-dispatcher'],
packages=packages,
package_data={'': ['LICENSE']},
install_requires=requires,
license=open('LICENSE').read(),
)
del os.environ['PYTHONDONTWRITEBYTECODE']
|
<commit_before>#!/usr/bin/env python2
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
packages = [
'connman_dispatcher'
]
requires = [
'pyee >= 0.0.9'
]
setup(
name='connman-dispatcher',
version='0.0.6',
description='Call scripts on network changes',
long_description=open('README.md').read(),
author='Alexandr Skurikhin',
author_email='a.skurikhin@gmail.com',
url='http://github.com/a-sk/connman-dispatcher',
scripts=['bin/connman-dispatcher'],
packages=packages,
package_data={'': ['LICENSE']},
install_requires=requires,
license=open('LICENSE').read(),
)
del os.environ['PYTHONDONTWRITEBYTECODE']
<commit_msg>Add logbook 0.6.0 as a dependency<commit_after>#!/usr/bin/env python2
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
os.environ['PYTHONDONTWRITEBYTECODE'] = '1'
packages = [
'connman_dispatcher'
]
requires = [
'pyee >= 0.0.9',
'Logbook == 0.6.0'
]
setup(
name='connman-dispatcher',
version='0.0.6',
description='Call scripts on network changes',
long_description=open('README.md').read(),
author='Alexandr Skurikhin',
author_email='a.skurikhin@gmail.com',
url='http://github.com/a-sk/connman-dispatcher',
scripts=['bin/connman-dispatcher'],
packages=packages,
package_data={'': ['LICENSE']},
install_requires=requires,
license=open('LICENSE').read(),
)
del os.environ['PYTHONDONTWRITEBYTECODE']
|
53948f31d9054a747853e62cbe63ce91e4fe6e22
|
setup.py
|
setup.py
|
import os
from setuptools import setup
setup(
name = "prometheus_client",
version = "0.0.9",
author = "Brian Brazil",
author_email = "brian.brazil@gmail.com",
description = ("Python client for the Prometheus monitoring system."),
long_description = ("See https://github.com/prometheus/client_python/blob/master/README.md for documentation."),
license = "Apache Software License 2.0",
keywords = "prometheus monitoring instrumentation client",
url = "https://github.com/prometheus/client_python",
packages=['prometheus_client'],
test_suite="tests",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
],
)
|
import os
from setuptools import setup
setup(
name = "prometheus_client",
version = "0.0.9",
author = "Brian Brazil",
author_email = "brian.brazil@gmail.com",
description = ("Python client for the Prometheus monitoring system."),
long_description = ("See https://github.com/prometheus/client_python/blob/master/README.md for documentation."),
license = "Apache Software License 2.0",
keywords = "prometheus monitoring instrumentation client",
url = "https://github.com/prometheus/client_python",
packages=['prometheus_client', 'prometheus_client.bridge'],
test_suite="tests",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
],
)
|
Split __init__.py into separate files.
|
Split __init__.py into separate files.
|
Python
|
apache-2.0
|
prometheus/client_python,justyns/client_python,arturhoo/client_python,alexander-95/client_python,thomaso-mirodin/client_python,korfuri/client_python,machinelady/client_python
|
import os
from setuptools import setup
setup(
name = "prometheus_client",
version = "0.0.9",
author = "Brian Brazil",
author_email = "brian.brazil@gmail.com",
description = ("Python client for the Prometheus monitoring system."),
long_description = ("See https://github.com/prometheus/client_python/blob/master/README.md for documentation."),
license = "Apache Software License 2.0",
keywords = "prometheus monitoring instrumentation client",
url = "https://github.com/prometheus/client_python",
packages=['prometheus_client'],
test_suite="tests",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
],
)
Split __init__.py into separate files.
|
import os
from setuptools import setup
setup(
name = "prometheus_client",
version = "0.0.9",
author = "Brian Brazil",
author_email = "brian.brazil@gmail.com",
description = ("Python client for the Prometheus monitoring system."),
long_description = ("See https://github.com/prometheus/client_python/blob/master/README.md for documentation."),
license = "Apache Software License 2.0",
keywords = "prometheus monitoring instrumentation client",
url = "https://github.com/prometheus/client_python",
packages=['prometheus_client', 'prometheus_client.bridge'],
test_suite="tests",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
],
)
|
<commit_before>import os
from setuptools import setup
setup(
name = "prometheus_client",
version = "0.0.9",
author = "Brian Brazil",
author_email = "brian.brazil@gmail.com",
description = ("Python client for the Prometheus monitoring system."),
long_description = ("See https://github.com/prometheus/client_python/blob/master/README.md for documentation."),
license = "Apache Software License 2.0",
keywords = "prometheus monitoring instrumentation client",
url = "https://github.com/prometheus/client_python",
packages=['prometheus_client'],
test_suite="tests",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
],
)
<commit_msg>Split __init__.py into separate files.<commit_after>
|
import os
from setuptools import setup
setup(
name = "prometheus_client",
version = "0.0.9",
author = "Brian Brazil",
author_email = "brian.brazil@gmail.com",
description = ("Python client for the Prometheus monitoring system."),
long_description = ("See https://github.com/prometheus/client_python/blob/master/README.md for documentation."),
license = "Apache Software License 2.0",
keywords = "prometheus monitoring instrumentation client",
url = "https://github.com/prometheus/client_python",
packages=['prometheus_client', 'prometheus_client.bridge'],
test_suite="tests",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
],
)
|
import os
from setuptools import setup
setup(
name = "prometheus_client",
version = "0.0.9",
author = "Brian Brazil",
author_email = "brian.brazil@gmail.com",
description = ("Python client for the Prometheus monitoring system."),
long_description = ("See https://github.com/prometheus/client_python/blob/master/README.md for documentation."),
license = "Apache Software License 2.0",
keywords = "prometheus monitoring instrumentation client",
url = "https://github.com/prometheus/client_python",
packages=['prometheus_client'],
test_suite="tests",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
],
)
Split __init__.py into separate files.import os
from setuptools import setup
setup(
name = "prometheus_client",
version = "0.0.9",
author = "Brian Brazil",
author_email = "brian.brazil@gmail.com",
description = ("Python client for the Prometheus monitoring system."),
long_description = ("See https://github.com/prometheus/client_python/blob/master/README.md for documentation."),
license = "Apache Software License 2.0",
keywords = "prometheus monitoring instrumentation client",
url = "https://github.com/prometheus/client_python",
packages=['prometheus_client', 'prometheus_client.bridge'],
test_suite="tests",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
],
)
|
<commit_before>import os
from setuptools import setup
setup(
name = "prometheus_client",
version = "0.0.9",
author = "Brian Brazil",
author_email = "brian.brazil@gmail.com",
description = ("Python client for the Prometheus monitoring system."),
long_description = ("See https://github.com/prometheus/client_python/blob/master/README.md for documentation."),
license = "Apache Software License 2.0",
keywords = "prometheus monitoring instrumentation client",
url = "https://github.com/prometheus/client_python",
packages=['prometheus_client'],
test_suite="tests",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
],
)
<commit_msg>Split __init__.py into separate files.<commit_after>import os
from setuptools import setup
setup(
name = "prometheus_client",
version = "0.0.9",
author = "Brian Brazil",
author_email = "brian.brazil@gmail.com",
description = ("Python client for the Prometheus monitoring system."),
long_description = ("See https://github.com/prometheus/client_python/blob/master/README.md for documentation."),
license = "Apache Software License 2.0",
keywords = "prometheus monitoring instrumentation client",
url = "https://github.com/prometheus/client_python",
packages=['prometheus_client', 'prometheus_client.bridge'],
test_suite="tests",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Information Technology",
"Intended Audience :: System Administrators",
"Topic :: System :: Monitoring",
"License :: OSI Approved :: Apache Software License",
],
)
|
116a590c8273cc6f048f5970bc4a0f4f5bfff074
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
setup(name='pynxc',
version='0.1.7',
description='A Python to NXC Converter for programming '
'LEGO MINDSTORMS Robots',
author='Brian Blais',
author_email='bblais@bryant.edu',
maintainer='Marek Šuppa',
maintainer_email='marek@suppa.sk',
url='https://github.com/xlcteam/pynxc',
packages=['pynxc'],
#include_package_data=True,
data_files=[('tests', ['pynxc/tests/tests.py']),
('', ['pynxc/defs.h'])],
entry_points = {
'console_scripts': [
'pynxc = pynxc:main'
]
}
)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
setup(name='pynxc',
version='0.1.7',
description='A Python to NXC Converter for programming '
'LEGO MINDSTORMS Robots',
author='Brian Blais',
author_email='bblais@bryant.edu',
maintainer='Marek Šuppa',
maintainer_email='marek@suppa.sk',
url='https://github.com/xlcteam/pynxc',
packages=['pynxc'],
data_files=[('tests', ['pynxc/tests/tests.py']),
('', ['pynxc/defs.h'])],
entry_points = {
'console_scripts': [
'pynxc = pynxc:main'
]
}
)
|
Remove old method of adding datafiles.
|
Remove old method of adding datafiles.
|
Python
|
bsd-3-clause
|
xlcteam/pynxc,xlcteam/pynxc,xlcteam/pynxc
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
setup(name='pynxc',
version='0.1.7',
description='A Python to NXC Converter for programming '
'LEGO MINDSTORMS Robots',
author='Brian Blais',
author_email='bblais@bryant.edu',
maintainer='Marek Šuppa',
maintainer_email='marek@suppa.sk',
url='https://github.com/xlcteam/pynxc',
packages=['pynxc'],
#include_package_data=True,
data_files=[('tests', ['pynxc/tests/tests.py']),
('', ['pynxc/defs.h'])],
entry_points = {
'console_scripts': [
'pynxc = pynxc:main'
]
}
)
Remove old method of adding datafiles.
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
setup(name='pynxc',
version='0.1.7',
description='A Python to NXC Converter for programming '
'LEGO MINDSTORMS Robots',
author='Brian Blais',
author_email='bblais@bryant.edu',
maintainer='Marek Šuppa',
maintainer_email='marek@suppa.sk',
url='https://github.com/xlcteam/pynxc',
packages=['pynxc'],
data_files=[('tests', ['pynxc/tests/tests.py']),
('', ['pynxc/defs.h'])],
entry_points = {
'console_scripts': [
'pynxc = pynxc:main'
]
}
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
setup(name='pynxc',
version='0.1.7',
description='A Python to NXC Converter for programming '
'LEGO MINDSTORMS Robots',
author='Brian Blais',
author_email='bblais@bryant.edu',
maintainer='Marek Šuppa',
maintainer_email='marek@suppa.sk',
url='https://github.com/xlcteam/pynxc',
packages=['pynxc'],
#include_package_data=True,
data_files=[('tests', ['pynxc/tests/tests.py']),
('', ['pynxc/defs.h'])],
entry_points = {
'console_scripts': [
'pynxc = pynxc:main'
]
}
)
<commit_msg>Remove old method of adding datafiles.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
setup(name='pynxc',
version='0.1.7',
description='A Python to NXC Converter for programming '
'LEGO MINDSTORMS Robots',
author='Brian Blais',
author_email='bblais@bryant.edu',
maintainer='Marek Šuppa',
maintainer_email='marek@suppa.sk',
url='https://github.com/xlcteam/pynxc',
packages=['pynxc'],
data_files=[('tests', ['pynxc/tests/tests.py']),
('', ['pynxc/defs.h'])],
entry_points = {
'console_scripts': [
'pynxc = pynxc:main'
]
}
)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
setup(name='pynxc',
version='0.1.7',
description='A Python to NXC Converter for programming '
'LEGO MINDSTORMS Robots',
author='Brian Blais',
author_email='bblais@bryant.edu',
maintainer='Marek Šuppa',
maintainer_email='marek@suppa.sk',
url='https://github.com/xlcteam/pynxc',
packages=['pynxc'],
#include_package_data=True,
data_files=[('tests', ['pynxc/tests/tests.py']),
('', ['pynxc/defs.h'])],
entry_points = {
'console_scripts': [
'pynxc = pynxc:main'
]
}
)
Remove old method of adding datafiles.#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
setup(name='pynxc',
version='0.1.7',
description='A Python to NXC Converter for programming '
'LEGO MINDSTORMS Robots',
author='Brian Blais',
author_email='bblais@bryant.edu',
maintainer='Marek Šuppa',
maintainer_email='marek@suppa.sk',
url='https://github.com/xlcteam/pynxc',
packages=['pynxc'],
data_files=[('tests', ['pynxc/tests/tests.py']),
('', ['pynxc/defs.h'])],
entry_points = {
'console_scripts': [
'pynxc = pynxc:main'
]
}
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
setup(name='pynxc',
version='0.1.7',
description='A Python to NXC Converter for programming '
'LEGO MINDSTORMS Robots',
author='Brian Blais',
author_email='bblais@bryant.edu',
maintainer='Marek Šuppa',
maintainer_email='marek@suppa.sk',
url='https://github.com/xlcteam/pynxc',
packages=['pynxc'],
#include_package_data=True,
data_files=[('tests', ['pynxc/tests/tests.py']),
('', ['pynxc/defs.h'])],
entry_points = {
'console_scripts': [
'pynxc = pynxc:main'
]
}
)
<commit_msg>Remove old method of adding datafiles.<commit_after>#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup
setup(name='pynxc',
version='0.1.7',
description='A Python to NXC Converter for programming '
'LEGO MINDSTORMS Robots',
author='Brian Blais',
author_email='bblais@bryant.edu',
maintainer='Marek Šuppa',
maintainer_email='marek@suppa.sk',
url='https://github.com/xlcteam/pynxc',
packages=['pynxc'],
data_files=[('tests', ['pynxc/tests/tests.py']),
('', ['pynxc/defs.h'])],
entry_points = {
'console_scripts': [
'pynxc = pynxc:main'
]
}
)
|
834b7ff81d6e2777d3952bb588a53f12f5ace5f5
|
setup.py
|
setup.py
|
#
# This is the regobj setuptools script.
# Originally developed by Ryan Kelly, 2009.
#
# This script is placed in the public domain.
#
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "ryan@rfk.id.au"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
)
|
#
# This is the regobj setuptools script.
# Originally developed by Ryan Kelly, 2009.
#
# This script is placed in the public domain.
#
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "ryan@rfk.id.au"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
classifiers=[c.strip() for c in """
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Libraries :: Python Modules
""".split('\n') if c.strip()],
)
|
Add a Python 3 classifier recommended by community
|
Add a Python 3 classifier recommended by community
|
Python
|
mit
|
rfk/regobj
|
#
# This is the regobj setuptools script.
# Originally developed by Ryan Kelly, 2009.
#
# This script is placed in the public domain.
#
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "ryan@rfk.id.au"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
)
Add a Python 3 classifier recommended by community
|
#
# This is the regobj setuptools script.
# Originally developed by Ryan Kelly, 2009.
#
# This script is placed in the public domain.
#
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "ryan@rfk.id.au"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
classifiers=[c.strip() for c in """
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Libraries :: Python Modules
""".split('\n') if c.strip()],
)
|
<commit_before>#
# This is the regobj setuptools script.
# Originally developed by Ryan Kelly, 2009.
#
# This script is placed in the public domain.
#
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "ryan@rfk.id.au"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
)
<commit_msg>Add a Python 3 classifier recommended by community<commit_after>
|
#
# This is the regobj setuptools script.
# Originally developed by Ryan Kelly, 2009.
#
# This script is placed in the public domain.
#
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "ryan@rfk.id.au"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
classifiers=[c.strip() for c in """
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Libraries :: Python Modules
""".split('\n') if c.strip()],
)
|
#
# This is the regobj setuptools script.
# Originally developed by Ryan Kelly, 2009.
#
# This script is placed in the public domain.
#
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "ryan@rfk.id.au"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
)
Add a Python 3 classifier recommended by community#
# This is the regobj setuptools script.
# Originally developed by Ryan Kelly, 2009.
#
# This script is placed in the public domain.
#
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "ryan@rfk.id.au"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
classifiers=[c.strip() for c in """
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Libraries :: Python Modules
""".split('\n') if c.strip()],
)
|
<commit_before>#
# This is the regobj setuptools script.
# Originally developed by Ryan Kelly, 2009.
#
# This script is placed in the public domain.
#
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "ryan@rfk.id.au"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
)
<commit_msg>Add a Python 3 classifier recommended by community<commit_after>#
# This is the regobj setuptools script.
# Originally developed by Ryan Kelly, 2009.
#
# This script is placed in the public domain.
#
from distutils.core import setup
# If we did a straight `import regobj` here we wouldn't be able
# to build on non-win32 machines.
regobj = {}
try:
execfile("regobj.py",regobj)
except ImportError:
pass
VERSION = regobj["__version__"]
NAME = "regobj"
DESCRIPTION = "Pythonic object-based access to the Windows Registry."
LONG_DESC = regobj["__doc__"]
AUTHOR = "Ryan Kelly"
AUTHOR_EMAIL = "ryan@rfk.id.au"
URL="https://github.com/rfk/regobj"
LICENSE = "MIT"
KEYWORDS = "windows registry"
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
py_modules=["regobj"],
classifiers=[c.strip() for c in """
Intended Audience :: Developers
License :: OSI Approved :: MIT License
Programming Language :: Python :: 2
Programming Language :: Python :: 3
Topic :: Software Development :: Libraries :: Python Modules
""".split('\n') if c.strip()],
)
|
2d63c7890e84ee6512095ce960d6d6b5e2187163
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta'
, 'Environment :: Console'
, 'Environment :: Console :: Curses'
, 'Intended Audience :: Developers'
, 'License :: BSD'
, 'Natural Language :: English'
, 'Operating System :: MacOS :: MacOS X'
, 'Operating System :: Microsoft :: Windows'
, 'Operating System :: POSIX'
, 'Programming Language :: Python'
, 'Topic :: Software Development :: Testing'
]
setup( name = 'assertEquals'
, version = '0.4.3'
, packages = [ 'assertEquals'
, 'assertEquals.cli'
, 'assertEquals.interactive'
, 'assertEquals.interactive.screens'
, 'assertEquals.tests'
, 'assertEquals.tests.interactive'
]
, entry_points = { 'console_scripts'
: [ 'assertEquals = assertEquals.cli.main:main' ]
}
, description = 'assertEquals is an epic testing interface for Python.'
, author = 'Chad Whitacre'
, author_email = 'chad@zetaweb.com'
, url = 'https://www.github.com/whit537/assertEquals/'
, classifiers = classifiers
)
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta'
, 'Environment :: Console'
, 'Environment :: Console :: Curses'
, 'Intended Audience :: Developers'
, 'License :: OSI Approved :: BSD License'
, 'Natural Language :: English'
, 'Operating System :: MacOS :: MacOS X'
, 'Operating System :: Microsoft :: Windows'
, 'Operating System :: POSIX'
, 'Programming Language :: Python'
, 'Topic :: Software Development :: Testing'
]
setup( name = 'assertEquals'
, version = '0.4.3'
, packages = [ 'assertEquals'
, 'assertEquals.cli'
, 'assertEquals.interactive'
, 'assertEquals.interactive.screens'
, 'assertEquals.tests'
, 'assertEquals.tests.interactive'
]
, entry_points = { 'console_scripts'
: [ 'assertEquals = assertEquals.cli.main:main' ]
}
, description = 'assertEquals is an epic testing interface for Python.'
, author = 'Chad Whitacre'
, author_email = 'chad@zetaweb.com'
, url = 'https://www.github.com/whit537/assertEquals/'
, classifiers = classifiers
)
|
Change trove classifier; 'BSD' was invalid
|
Change trove classifier; 'BSD' was invalid
List of available is here:
https://pypi.python.org/pypi?:action=list_classifiers
|
Python
|
bsd-2-clause
|
whit537/assertEquals
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta'
, 'Environment :: Console'
, 'Environment :: Console :: Curses'
, 'Intended Audience :: Developers'
, 'License :: BSD'
, 'Natural Language :: English'
, 'Operating System :: MacOS :: MacOS X'
, 'Operating System :: Microsoft :: Windows'
, 'Operating System :: POSIX'
, 'Programming Language :: Python'
, 'Topic :: Software Development :: Testing'
]
setup( name = 'assertEquals'
, version = '0.4.3'
, packages = [ 'assertEquals'
, 'assertEquals.cli'
, 'assertEquals.interactive'
, 'assertEquals.interactive.screens'
, 'assertEquals.tests'
, 'assertEquals.tests.interactive'
]
, entry_points = { 'console_scripts'
: [ 'assertEquals = assertEquals.cli.main:main' ]
}
, description = 'assertEquals is an epic testing interface for Python.'
, author = 'Chad Whitacre'
, author_email = 'chad@zetaweb.com'
, url = 'https://www.github.com/whit537/assertEquals/'
, classifiers = classifiers
)
Change trove classifier; 'BSD' was invalid
List of available is here:
https://pypi.python.org/pypi?:action=list_classifiers
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta'
, 'Environment :: Console'
, 'Environment :: Console :: Curses'
, 'Intended Audience :: Developers'
, 'License :: OSI Approved :: BSD License'
, 'Natural Language :: English'
, 'Operating System :: MacOS :: MacOS X'
, 'Operating System :: Microsoft :: Windows'
, 'Operating System :: POSIX'
, 'Programming Language :: Python'
, 'Topic :: Software Development :: Testing'
]
setup( name = 'assertEquals'
, version = '0.4.3'
, packages = [ 'assertEquals'
, 'assertEquals.cli'
, 'assertEquals.interactive'
, 'assertEquals.interactive.screens'
, 'assertEquals.tests'
, 'assertEquals.tests.interactive'
]
, entry_points = { 'console_scripts'
: [ 'assertEquals = assertEquals.cli.main:main' ]
}
, description = 'assertEquals is an epic testing interface for Python.'
, author = 'Chad Whitacre'
, author_email = 'chad@zetaweb.com'
, url = 'https://www.github.com/whit537/assertEquals/'
, classifiers = classifiers
)
|
<commit_before>#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta'
, 'Environment :: Console'
, 'Environment :: Console :: Curses'
, 'Intended Audience :: Developers'
, 'License :: BSD'
, 'Natural Language :: English'
, 'Operating System :: MacOS :: MacOS X'
, 'Operating System :: Microsoft :: Windows'
, 'Operating System :: POSIX'
, 'Programming Language :: Python'
, 'Topic :: Software Development :: Testing'
]
setup( name = 'assertEquals'
, version = '0.4.3'
, packages = [ 'assertEquals'
, 'assertEquals.cli'
, 'assertEquals.interactive'
, 'assertEquals.interactive.screens'
, 'assertEquals.tests'
, 'assertEquals.tests.interactive'
]
, entry_points = { 'console_scripts'
: [ 'assertEquals = assertEquals.cli.main:main' ]
}
, description = 'assertEquals is an epic testing interface for Python.'
, author = 'Chad Whitacre'
, author_email = 'chad@zetaweb.com'
, url = 'https://www.github.com/whit537/assertEquals/'
, classifiers = classifiers
)
<commit_msg>Change trove classifier; 'BSD' was invalid
List of available is here:
https://pypi.python.org/pypi?:action=list_classifiers<commit_after>
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta'
, 'Environment :: Console'
, 'Environment :: Console :: Curses'
, 'Intended Audience :: Developers'
, 'License :: OSI Approved :: BSD License'
, 'Natural Language :: English'
, 'Operating System :: MacOS :: MacOS X'
, 'Operating System :: Microsoft :: Windows'
, 'Operating System :: POSIX'
, 'Programming Language :: Python'
, 'Topic :: Software Development :: Testing'
]
setup( name = 'assertEquals'
, version = '0.4.3'
, packages = [ 'assertEquals'
, 'assertEquals.cli'
, 'assertEquals.interactive'
, 'assertEquals.interactive.screens'
, 'assertEquals.tests'
, 'assertEquals.tests.interactive'
]
, entry_points = { 'console_scripts'
: [ 'assertEquals = assertEquals.cli.main:main' ]
}
, description = 'assertEquals is an epic testing interface for Python.'
, author = 'Chad Whitacre'
, author_email = 'chad@zetaweb.com'
, url = 'https://www.github.com/whit537/assertEquals/'
, classifiers = classifiers
)
|
#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta'
, 'Environment :: Console'
, 'Environment :: Console :: Curses'
, 'Intended Audience :: Developers'
, 'License :: BSD'
, 'Natural Language :: English'
, 'Operating System :: MacOS :: MacOS X'
, 'Operating System :: Microsoft :: Windows'
, 'Operating System :: POSIX'
, 'Programming Language :: Python'
, 'Topic :: Software Development :: Testing'
]
setup( name = 'assertEquals'
, version = '0.4.3'
, packages = [ 'assertEquals'
, 'assertEquals.cli'
, 'assertEquals.interactive'
, 'assertEquals.interactive.screens'
, 'assertEquals.tests'
, 'assertEquals.tests.interactive'
]
, entry_points = { 'console_scripts'
: [ 'assertEquals = assertEquals.cli.main:main' ]
}
, description = 'assertEquals is an epic testing interface for Python.'
, author = 'Chad Whitacre'
, author_email = 'chad@zetaweb.com'
, url = 'https://www.github.com/whit537/assertEquals/'
, classifiers = classifiers
)
Change trove classifier; 'BSD' was invalid
List of available is here:
https://pypi.python.org/pypi?:action=list_classifiers#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta'
, 'Environment :: Console'
, 'Environment :: Console :: Curses'
, 'Intended Audience :: Developers'
, 'License :: OSI Approved :: BSD License'
, 'Natural Language :: English'
, 'Operating System :: MacOS :: MacOS X'
, 'Operating System :: Microsoft :: Windows'
, 'Operating System :: POSIX'
, 'Programming Language :: Python'
, 'Topic :: Software Development :: Testing'
]
setup( name = 'assertEquals'
, version = '0.4.3'
, packages = [ 'assertEquals'
, 'assertEquals.cli'
, 'assertEquals.interactive'
, 'assertEquals.interactive.screens'
, 'assertEquals.tests'
, 'assertEquals.tests.interactive'
]
, entry_points = { 'console_scripts'
: [ 'assertEquals = assertEquals.cli.main:main' ]
}
, description = 'assertEquals is an epic testing interface for Python.'
, author = 'Chad Whitacre'
, author_email = 'chad@zetaweb.com'
, url = 'https://www.github.com/whit537/assertEquals/'
, classifiers = classifiers
)
|
<commit_before>#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta'
, 'Environment :: Console'
, 'Environment :: Console :: Curses'
, 'Intended Audience :: Developers'
, 'License :: BSD'
, 'Natural Language :: English'
, 'Operating System :: MacOS :: MacOS X'
, 'Operating System :: Microsoft :: Windows'
, 'Operating System :: POSIX'
, 'Programming Language :: Python'
, 'Topic :: Software Development :: Testing'
]
setup( name = 'assertEquals'
, version = '0.4.3'
, packages = [ 'assertEquals'
, 'assertEquals.cli'
, 'assertEquals.interactive'
, 'assertEquals.interactive.screens'
, 'assertEquals.tests'
, 'assertEquals.tests.interactive'
]
, entry_points = { 'console_scripts'
: [ 'assertEquals = assertEquals.cli.main:main' ]
}
, description = 'assertEquals is an epic testing interface for Python.'
, author = 'Chad Whitacre'
, author_email = 'chad@zetaweb.com'
, url = 'https://www.github.com/whit537/assertEquals/'
, classifiers = classifiers
)
<commit_msg>Change trove classifier; 'BSD' was invalid
List of available is here:
https://pypi.python.org/pypi?:action=list_classifiers<commit_after>#!/usr/bin/env python
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
classifiers = [
'Development Status :: 4 - Beta'
, 'Environment :: Console'
, 'Environment :: Console :: Curses'
, 'Intended Audience :: Developers'
, 'License :: OSI Approved :: BSD License'
, 'Natural Language :: English'
, 'Operating System :: MacOS :: MacOS X'
, 'Operating System :: Microsoft :: Windows'
, 'Operating System :: POSIX'
, 'Programming Language :: Python'
, 'Topic :: Software Development :: Testing'
]
setup( name = 'assertEquals'
, version = '0.4.3'
, packages = [ 'assertEquals'
, 'assertEquals.cli'
, 'assertEquals.interactive'
, 'assertEquals.interactive.screens'
, 'assertEquals.tests'
, 'assertEquals.tests.interactive'
]
, entry_points = { 'console_scripts'
: [ 'assertEquals = assertEquals.cli.main:main' ]
}
, description = 'assertEquals is an epic testing interface for Python.'
, author = 'Chad Whitacre'
, author_email = 'chad@zetaweb.com'
, url = 'https://www.github.com/whit537/assertEquals/'
, classifiers = classifiers
)
|
7170a78d7aeb004689143d72d7f243b1e98dafb3
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='gethazel',
version='0.1.0',
description='A balanced life is a good life.',
author='Reilly Tucker Siemens',
author_email='reilly.siemens@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
],
keywords='Corgi Hazel Balance',
py_modules=['gethazel'],
install_requires=[
'beautifulsoup4',
'lxml',
'requests',
],
entry_points={
'console_scripts': [
'gethazel=gethazel:main',
]
},
)
|
from setuptools import setup
setup(
name='gethazel',
version='0.1.0',
description='A balanced life is a good life.',
author='Reilly Tucker Siemens',
author_email='reilly.siemens@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
],
keywords='Corgi Hazel Balance',
py_modules=['gethazel'],
install_requires=[
'requests',
],
entry_points={
'console_scripts': [
'gethazel=gethazel:main',
]
},
)
|
Remove beautifulsoup4 and lxml as dependencies.
|
Remove beautifulsoup4 and lxml as dependencies.
|
Python
|
mit
|
reillysiemens/gethazel
|
from setuptools import setup
setup(
name='gethazel',
version='0.1.0',
description='A balanced life is a good life.',
author='Reilly Tucker Siemens',
author_email='reilly.siemens@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
],
keywords='Corgi Hazel Balance',
py_modules=['gethazel'],
install_requires=[
'beautifulsoup4',
'lxml',
'requests',
],
entry_points={
'console_scripts': [
'gethazel=gethazel:main',
]
},
)
Remove beautifulsoup4 and lxml as dependencies.
|
from setuptools import setup
setup(
name='gethazel',
version='0.1.0',
description='A balanced life is a good life.',
author='Reilly Tucker Siemens',
author_email='reilly.siemens@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
],
keywords='Corgi Hazel Balance',
py_modules=['gethazel'],
install_requires=[
'requests',
],
entry_points={
'console_scripts': [
'gethazel=gethazel:main',
]
},
)
|
<commit_before>from setuptools import setup
setup(
name='gethazel',
version='0.1.0',
description='A balanced life is a good life.',
author='Reilly Tucker Siemens',
author_email='reilly.siemens@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
],
keywords='Corgi Hazel Balance',
py_modules=['gethazel'],
install_requires=[
'beautifulsoup4',
'lxml',
'requests',
],
entry_points={
'console_scripts': [
'gethazel=gethazel:main',
]
},
)
<commit_msg>Remove beautifulsoup4 and lxml as dependencies.<commit_after>
|
from setuptools import setup
setup(
name='gethazel',
version='0.1.0',
description='A balanced life is a good life.',
author='Reilly Tucker Siemens',
author_email='reilly.siemens@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
],
keywords='Corgi Hazel Balance',
py_modules=['gethazel'],
install_requires=[
'requests',
],
entry_points={
'console_scripts': [
'gethazel=gethazel:main',
]
},
)
|
from setuptools import setup
setup(
name='gethazel',
version='0.1.0',
description='A balanced life is a good life.',
author='Reilly Tucker Siemens',
author_email='reilly.siemens@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
],
keywords='Corgi Hazel Balance',
py_modules=['gethazel'],
install_requires=[
'beautifulsoup4',
'lxml',
'requests',
],
entry_points={
'console_scripts': [
'gethazel=gethazel:main',
]
},
)
Remove beautifulsoup4 and lxml as dependencies.from setuptools import setup
setup(
name='gethazel',
version='0.1.0',
description='A balanced life is a good life.',
author='Reilly Tucker Siemens',
author_email='reilly.siemens@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
],
keywords='Corgi Hazel Balance',
py_modules=['gethazel'],
install_requires=[
'requests',
],
entry_points={
'console_scripts': [
'gethazel=gethazel:main',
]
},
)
|
<commit_before>from setuptools import setup
setup(
name='gethazel',
version='0.1.0',
description='A balanced life is a good life.',
author='Reilly Tucker Siemens',
author_email='reilly.siemens@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
],
keywords='Corgi Hazel Balance',
py_modules=['gethazel'],
install_requires=[
'beautifulsoup4',
'lxml',
'requests',
],
entry_points={
'console_scripts': [
'gethazel=gethazel:main',
]
},
)
<commit_msg>Remove beautifulsoup4 and lxml as dependencies.<commit_after>from setuptools import setup
setup(
name='gethazel',
version='0.1.0',
description='A balanced life is a good life.',
author='Reilly Tucker Siemens',
author_email='reilly.siemens@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.4'
],
keywords='Corgi Hazel Balance',
py_modules=['gethazel'],
install_requires=[
'requests',
],
entry_points={
'console_scripts': [
'gethazel=gethazel:main',
]
},
)
|
024d797eda5ceeaf954c1ea41df7fb43fb40130c
|
setup.py
|
setup.py
|
"""Setup data for aib2ofx."""
from setuptools import setup
setup(
name='aib2ofx',
description='Download data from aib.ie in OFX format',
version='0.6',
author='Jakub Turski',
author_email='yacoob@gmail.com',
url='http://github.com/yacoob/aib2ofx',
packages=['aib2ofx'],
entry_points={
'console_scripts': ['aib2ofx = aib2ofx.main:main'],
},
dependency_links=[
'git+https://github.com/MechanicalSoup/MechanicalSoup'
'#egg=mechanicalsoup --process-dependency-links'
],
install_requires=[
'mechanicalsoup',
],
)
|
"""Setup data for aib2ofx."""
from setuptools import setup
setup(
name='aib2ofx',
description='Download data from aib.ie in OFX format',
version='0.6',
author='Jakub Turski',
author_email='yacoob@gmail.com',
url='http://github.com/yacoob/aib2ofx',
packages=['aib2ofx'],
entry_points={
'console_scripts': ['aib2ofx = aib2ofx.main:main'],
},
dependency_links=[
'git+https://github.com/MechanicalSoup/MechanicalSoup'
'#egg=mechanicalsoup --process-dependency-links'
],
install_requires=[
'mechanicalsoup',
'python-dateutil',
],
)
|
Add a dependency that got dropped.
|
Add a dependency that got dropped.
|
Python
|
bsd-3-clause
|
yacoob/aib2ofx
|
"""Setup data for aib2ofx."""
from setuptools import setup
setup(
name='aib2ofx',
description='Download data from aib.ie in OFX format',
version='0.6',
author='Jakub Turski',
author_email='yacoob@gmail.com',
url='http://github.com/yacoob/aib2ofx',
packages=['aib2ofx'],
entry_points={
'console_scripts': ['aib2ofx = aib2ofx.main:main'],
},
dependency_links=[
'git+https://github.com/MechanicalSoup/MechanicalSoup'
'#egg=mechanicalsoup --process-dependency-links'
],
install_requires=[
'mechanicalsoup',
],
)
Add a dependency that got dropped.
|
"""Setup data for aib2ofx."""
from setuptools import setup
setup(
name='aib2ofx',
description='Download data from aib.ie in OFX format',
version='0.6',
author='Jakub Turski',
author_email='yacoob@gmail.com',
url='http://github.com/yacoob/aib2ofx',
packages=['aib2ofx'],
entry_points={
'console_scripts': ['aib2ofx = aib2ofx.main:main'],
},
dependency_links=[
'git+https://github.com/MechanicalSoup/MechanicalSoup'
'#egg=mechanicalsoup --process-dependency-links'
],
install_requires=[
'mechanicalsoup',
'python-dateutil',
],
)
|
<commit_before>"""Setup data for aib2ofx."""
from setuptools import setup
setup(
name='aib2ofx',
description='Download data from aib.ie in OFX format',
version='0.6',
author='Jakub Turski',
author_email='yacoob@gmail.com',
url='http://github.com/yacoob/aib2ofx',
packages=['aib2ofx'],
entry_points={
'console_scripts': ['aib2ofx = aib2ofx.main:main'],
},
dependency_links=[
'git+https://github.com/MechanicalSoup/MechanicalSoup'
'#egg=mechanicalsoup --process-dependency-links'
],
install_requires=[
'mechanicalsoup',
],
)
<commit_msg>Add a dependency that got dropped.<commit_after>
|
"""Setup data for aib2ofx."""
from setuptools import setup
setup(
name='aib2ofx',
description='Download data from aib.ie in OFX format',
version='0.6',
author='Jakub Turski',
author_email='yacoob@gmail.com',
url='http://github.com/yacoob/aib2ofx',
packages=['aib2ofx'],
entry_points={
'console_scripts': ['aib2ofx = aib2ofx.main:main'],
},
dependency_links=[
'git+https://github.com/MechanicalSoup/MechanicalSoup'
'#egg=mechanicalsoup --process-dependency-links'
],
install_requires=[
'mechanicalsoup',
'python-dateutil',
],
)
|
"""Setup data for aib2ofx."""
from setuptools import setup
setup(
name='aib2ofx',
description='Download data from aib.ie in OFX format',
version='0.6',
author='Jakub Turski',
author_email='yacoob@gmail.com',
url='http://github.com/yacoob/aib2ofx',
packages=['aib2ofx'],
entry_points={
'console_scripts': ['aib2ofx = aib2ofx.main:main'],
},
dependency_links=[
'git+https://github.com/MechanicalSoup/MechanicalSoup'
'#egg=mechanicalsoup --process-dependency-links'
],
install_requires=[
'mechanicalsoup',
],
)
Add a dependency that got dropped."""Setup data for aib2ofx."""
from setuptools import setup
setup(
name='aib2ofx',
description='Download data from aib.ie in OFX format',
version='0.6',
author='Jakub Turski',
author_email='yacoob@gmail.com',
url='http://github.com/yacoob/aib2ofx',
packages=['aib2ofx'],
entry_points={
'console_scripts': ['aib2ofx = aib2ofx.main:main'],
},
dependency_links=[
'git+https://github.com/MechanicalSoup/MechanicalSoup'
'#egg=mechanicalsoup --process-dependency-links'
],
install_requires=[
'mechanicalsoup',
'python-dateutil',
],
)
|
<commit_before>"""Setup data for aib2ofx."""
from setuptools import setup
setup(
name='aib2ofx',
description='Download data from aib.ie in OFX format',
version='0.6',
author='Jakub Turski',
author_email='yacoob@gmail.com',
url='http://github.com/yacoob/aib2ofx',
packages=['aib2ofx'],
entry_points={
'console_scripts': ['aib2ofx = aib2ofx.main:main'],
},
dependency_links=[
'git+https://github.com/MechanicalSoup/MechanicalSoup'
'#egg=mechanicalsoup --process-dependency-links'
],
install_requires=[
'mechanicalsoup',
],
)
<commit_msg>Add a dependency that got dropped.<commit_after>"""Setup data for aib2ofx."""
from setuptools import setup
setup(
name='aib2ofx',
description='Download data from aib.ie in OFX format',
version='0.6',
author='Jakub Turski',
author_email='yacoob@gmail.com',
url='http://github.com/yacoob/aib2ofx',
packages=['aib2ofx'],
entry_points={
'console_scripts': ['aib2ofx = aib2ofx.main:main'],
},
dependency_links=[
'git+https://github.com/MechanicalSoup/MechanicalSoup'
'#egg=mechanicalsoup --process-dependency-links'
],
install_requires=[
'mechanicalsoup',
'python-dateutil',
],
)
|
3c2e19c99afbb6f0fc1eace6c29adea0cab7ebdc
|
irclogview/views.py
|
irclogview/views.py
|
from django.http import HttpResponse
def index(request):
return HttpResponse('index')
def channel(request, name):
return HttpResponse('channel: %s' % name)
def show(request, name, year, month, day):
return HttpResponse('show: %s - %s/%s/%s' % (name, year, month, day))
|
from datetime import datetime
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from annoying.decorators import render_to
from .models import Channel, Log
def index(request):
return HttpResponse('index')
def channel_index(request, name):
channel = get_object_or_404(Channel, name=name)
return HttpResponse('channel: %s' % name)
@render_to('irclogview/show_log.html')
def show_log(request, name, year, month, day):
channel = get_object_or_404(Channel, name=name)
date = datetime(int(year), int(month), int(day)).date()
log = get_object_or_404(Log, channel=channel, date=date)
return {'log': log}
|
Add view to show log of a channel on a day
|
Add view to show log of a channel on a day
|
Python
|
agpl-3.0
|
fajran/irclogview,BlankOn/irclogview,fajran/irclogview,BlankOn/irclogview
|
from django.http import HttpResponse
def index(request):
return HttpResponse('index')
def channel(request, name):
return HttpResponse('channel: %s' % name)
def show(request, name, year, month, day):
return HttpResponse('show: %s - %s/%s/%s' % (name, year, month, day))
Add view to show log of a channel on a day
|
from datetime import datetime
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from annoying.decorators import render_to
from .models import Channel, Log
def index(request):
return HttpResponse('index')
def channel_index(request, name):
channel = get_object_or_404(Channel, name=name)
return HttpResponse('channel: %s' % name)
@render_to('irclogview/show_log.html')
def show_log(request, name, year, month, day):
channel = get_object_or_404(Channel, name=name)
date = datetime(int(year), int(month), int(day)).date()
log = get_object_or_404(Log, channel=channel, date=date)
return {'log': log}
|
<commit_before>from django.http import HttpResponse
def index(request):
return HttpResponse('index')
def channel(request, name):
return HttpResponse('channel: %s' % name)
def show(request, name, year, month, day):
return HttpResponse('show: %s - %s/%s/%s' % (name, year, month, day))
<commit_msg>Add view to show log of a channel on a day<commit_after>
|
from datetime import datetime
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from annoying.decorators import render_to
from .models import Channel, Log
def index(request):
return HttpResponse('index')
def channel_index(request, name):
channel = get_object_or_404(Channel, name=name)
return HttpResponse('channel: %s' % name)
@render_to('irclogview/show_log.html')
def show_log(request, name, year, month, day):
channel = get_object_or_404(Channel, name=name)
date = datetime(int(year), int(month), int(day)).date()
log = get_object_or_404(Log, channel=channel, date=date)
return {'log': log}
|
from django.http import HttpResponse
def index(request):
return HttpResponse('index')
def channel(request, name):
return HttpResponse('channel: %s' % name)
def show(request, name, year, month, day):
return HttpResponse('show: %s - %s/%s/%s' % (name, year, month, day))
Add view to show log of a channel on a dayfrom datetime import datetime
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from annoying.decorators import render_to
from .models import Channel, Log
def index(request):
return HttpResponse('index')
def channel_index(request, name):
channel = get_object_or_404(Channel, name=name)
return HttpResponse('channel: %s' % name)
@render_to('irclogview/show_log.html')
def show_log(request, name, year, month, day):
channel = get_object_or_404(Channel, name=name)
date = datetime(int(year), int(month), int(day)).date()
log = get_object_or_404(Log, channel=channel, date=date)
return {'log': log}
|
<commit_before>from django.http import HttpResponse
def index(request):
return HttpResponse('index')
def channel(request, name):
return HttpResponse('channel: %s' % name)
def show(request, name, year, month, day):
return HttpResponse('show: %s - %s/%s/%s' % (name, year, month, day))
<commit_msg>Add view to show log of a channel on a day<commit_after>from datetime import datetime
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from annoying.decorators import render_to
from .models import Channel, Log
def index(request):
return HttpResponse('index')
def channel_index(request, name):
channel = get_object_or_404(Channel, name=name)
return HttpResponse('channel: %s' % name)
@render_to('irclogview/show_log.html')
def show_log(request, name, year, month, day):
channel = get_object_or_404(Channel, name=name)
date = datetime(int(year), int(month), int(day)).date()
log = get_object_or_404(Log, channel=channel, date=date)
return {'log': log}
|
c3e5f19c4148eeb2b32e6ef30585f9355fa31812
|
kolibri/__init__.py
|
kolibri/__init__.py
|
"""
CAUTION! Keep everything here at at minimum. Do not import stuff.
This module is imported in setup.py, so you cannot for instance
import a dependency.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils import env
from .utils.version import get_version
# Setup the environment before loading anything else from the application
env.set_env()
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 12, 9, "beta", 0)
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = str(get_version(VERSION))
|
"""
CAUTION! Keep everything here at at minimum. Do not import stuff.
This module is imported in setup.py, so you cannot for instance
import a dependency.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils import env
from .utils.version import get_version
# Setup the environment before loading anything else from the application
env.set_env()
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 12, 9, "final")
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = str(get_version(VERSION))
|
Upgrade VERSION to 0.12.9 final
|
Upgrade VERSION to 0.12.9 final
|
Python
|
mit
|
indirectlylit/kolibri,learningequality/kolibri,learningequality/kolibri,mrpau/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,mrpau/kolibri,indirectlylit/kolibri,mrpau/kolibri,learningequality/kolibri,learningequality/kolibri,mrpau/kolibri
|
"""
CAUTION! Keep everything here at at minimum. Do not import stuff.
This module is imported in setup.py, so you cannot for instance
import a dependency.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils import env
from .utils.version import get_version
# Setup the environment before loading anything else from the application
env.set_env()
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 12, 9, "beta", 0)
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = str(get_version(VERSION))
Upgrade VERSION to 0.12.9 final
|
"""
CAUTION! Keep everything here at at minimum. Do not import stuff.
This module is imported in setup.py, so you cannot for instance
import a dependency.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils import env
from .utils.version import get_version
# Setup the environment before loading anything else from the application
env.set_env()
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 12, 9, "final")
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = str(get_version(VERSION))
|
<commit_before>"""
CAUTION! Keep everything here at at minimum. Do not import stuff.
This module is imported in setup.py, so you cannot for instance
import a dependency.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils import env
from .utils.version import get_version
# Setup the environment before loading anything else from the application
env.set_env()
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 12, 9, "beta", 0)
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = str(get_version(VERSION))
<commit_msg>Upgrade VERSION to 0.12.9 final<commit_after>
|
"""
CAUTION! Keep everything here at at minimum. Do not import stuff.
This module is imported in setup.py, so you cannot for instance
import a dependency.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils import env
from .utils.version import get_version
# Setup the environment before loading anything else from the application
env.set_env()
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 12, 9, "final")
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = str(get_version(VERSION))
|
"""
CAUTION! Keep everything here at at minimum. Do not import stuff.
This module is imported in setup.py, so you cannot for instance
import a dependency.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils import env
from .utils.version import get_version
# Setup the environment before loading anything else from the application
env.set_env()
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 12, 9, "beta", 0)
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = str(get_version(VERSION))
Upgrade VERSION to 0.12.9 final"""
CAUTION! Keep everything here at at minimum. Do not import stuff.
This module is imported in setup.py, so you cannot for instance
import a dependency.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils import env
from .utils.version import get_version
# Setup the environment before loading anything else from the application
env.set_env()
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 12, 9, "final")
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = str(get_version(VERSION))
|
<commit_before>"""
CAUTION! Keep everything here at at minimum. Do not import stuff.
This module is imported in setup.py, so you cannot for instance
import a dependency.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils import env
from .utils.version import get_version
# Setup the environment before loading anything else from the application
env.set_env()
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 12, 9, "beta", 0)
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = str(get_version(VERSION))
<commit_msg>Upgrade VERSION to 0.12.9 final<commit_after>"""
CAUTION! Keep everything here at at minimum. Do not import stuff.
This module is imported in setup.py, so you cannot for instance
import a dependency.
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from .utils import env
from .utils.version import get_version
# Setup the environment before loading anything else from the application
env.set_env()
#: This may not be the exact version as it's subject to modification with
#: get_version() - use ``kolibri.__version__`` for the exact version string.
VERSION = (0, 12, 9, "final")
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = str(get_version(VERSION))
|
cab360d14a6b02cc1cf4649823acd2e2c683d240
|
utils/swift_build_support/swift_build_support/products/swift.py
|
utils/swift_build_support/swift_build_support/products/swift.py
|
# swift_build_support/products/swift.py -------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import product
class Swift(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add any runtime sanitizer arguments.
self.cmake_options.extend(self._compute_runtime_use_sanitizer())
def _compute_runtime_use_sanitizer(self):
sanitizer_list = []
if self.args.enable_tsan_runtime:
sanitizer_list += ['Thread']
if len(sanitizer_list) == 0:
return []
return ["-DSWIFT_RUNTIME_USE_SANITIZERS=%s" %
";".join(sanitizer_list)]
|
# swift_build_support/products/swift.py -------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import product
class Swift(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add any runtime sanitizer arguments.
self.cmake_options.extend(self._runtime_sanitizer_flags)
@property
def _runtime_sanitizer_flags(self):
sanitizer_list = []
if self.args.enable_tsan_runtime:
sanitizer_list += ['Thread']
if len(sanitizer_list) == 0:
return []
return ["-DSWIFT_RUNTIME_USE_SANITIZERS=%s" %
";".join(sanitizer_list)]
|
Change method _compute_runtime_use_sanitizer => property _runtime_sanitizer_flags. NFC.
|
[vacation-gardening] Change method _compute_runtime_use_sanitizer => property _runtime_sanitizer_flags. NFC.
|
Python
|
apache-2.0
|
parkera/swift,huonw/swift,CodaFi/swift,parkera/swift,codestergit/swift,austinzheng/swift,bitjammer/swift,glessard/swift,djwbrown/swift,bitjammer/swift,gottesmm/swift,lorentey/swift,tkremenek/swift,OscarSwanros/swift,milseman/swift,airspeedswift/swift,arvedviehweger/swift,CodaFi/swift,IngmarStein/swift,JaSpa/swift,atrick/swift,jckarter/swift,gribozavr/swift,karwa/swift,gregomni/swift,lorentey/swift,kperryua/swift,atrick/swift,shahmishal/swift,parkera/swift,jmgc/swift,codestergit/swift,jtbandes/swift,nathawes/swift,swiftix/swift,alblue/swift,roambotics/swift,swiftix/swift,uasys/swift,jmgc/swift,natecook1000/swift,brentdax/swift,harlanhaskins/swift,aschwaighofer/swift,danielmartin/swift,harlanhaskins/swift,rudkx/swift,sschiau/swift,deyton/swift,felix91gr/swift,JGiola/swift,practicalswift/swift,milseman/swift,modocache/swift,austinzheng/swift,roambotics/swift,IngmarStein/swift,danielmartin/swift,tkremenek/swift,danielmartin/swift,hughbe/swift,shahmishal/swift,amraboelela/swift,jopamer/swift,tinysun212/swift-windows,JGiola/swift,therealbnut/swift,lorentey/swift,jtbandes/swift,gribozavr/swift,arvedviehweger/swift,brentdax/swift,manavgabhawala/swift,alblue/swift,practicalswift/swift,kstaring/swift,amraboelela/swift,milseman/swift,harlanhaskins/swift,felix91gr/swift,OscarSwanros/swift,return/swift,ben-ng/swift,alblue/swift,gregomni/swift,sschiau/swift,practicalswift/swift,gottesmm/swift,JGiola/swift,harlanhaskins/swift,huonw/swift,bitjammer/swift,uasys/swift,calebd/swift,JaSpa/swift,kperryua/swift,bitjammer/swift,gregomni/swift,return/swift,jopamer/swift,shajrawi/swift,codestergit/swift,uasys/swift,devincoughlin/swift,Jnosh/swift,xedin/swift,rudkx/swift,benlangmuir/swift,stephentyrone/swift,apple/swift,nathawes/swift,frootloops/swift,ahoppen/swift,xwu/swift,therealbnut/swift,sschiau/swift,codestergit/swift,arvedviehweger/swift,allevato/swift,tjw/swift,calebd/swift,kstaring/swift,deyton/swift,benlangmuir/swift,bitjammer/swift,tardieu/swift,jtbandes/swift,alblue/swift,return/
swift,jtbandes/swift,jopamer/swift,jckarter/swift,shahmishal/swift,djwbrown/swift,milseman/swift,natecook1000/swift,benlangmuir/swift,gmilos/swift,JaSpa/swift,karwa/swift,frootloops/swift,austinzheng/swift,manavgabhawala/swift,return/swift,jtbandes/swift,practicalswift/swift,shajrawi/swift,gottesmm/swift,frootloops/swift,practicalswift/swift,JGiola/swift,felix91gr/swift,kstaring/swift,glessard/swift,zisko/swift,nathawes/swift,danielmartin/swift,natecook1000/swift,swiftix/swift,djwbrown/swift,zisko/swift,codestergit/swift,sschiau/swift,CodaFi/swift,alblue/swift,allevato/swift,allevato/swift,stephentyrone/swift,hooman/swift,tkremenek/swift,rudkx/swift,hughbe/swift,austinzheng/swift,huonw/swift,shahmishal/swift,tinysun212/swift-windows,parkera/swift,arvedviehweger/swift,ahoppen/swift,tinysun212/swift-windows,milseman/swift,tjw/swift,kstaring/swift,xwu/swift,xwu/swift,codestergit/swift,tkremenek/swift,IngmarStein/swift,return/swift,swiftix/swift,frootloops/swift,modocache/swift,jtbandes/swift,tkremenek/swift,devincoughlin/swift,gribozavr/swift,jckarter/swift,atrick/swift,shajrawi/swift,deyton/swift,ben-ng/swift,calebd/swift,jckarter/swift,brentdax/swift,tardieu/swift,austinzheng/swift,parkera/swift,modocache/swift,ben-ng/swift,stephentyrone/swift,CodaFi/swift,JGiola/swift,xwu/swift,zisko/swift,tardieu/swift,OscarSwanros/swift,austinzheng/swift,OscarSwanros/swift,xwu/swift,shahmishal/swift,allevato/swift,huonw/swift,therealbnut/swift,tardieu/swift,sschiau/swift,ben-ng/swift,brentdax/swift,kperryua/swift,brentdax/swift,modocache/swift,shahmishal/swift,brentdax/swift,devincoughlin/swift,manavgabhawala/swift,calebd/swift,manavgabhawala/swift,kperryua/swift,hughbe/swift,nathawes/swift,airspeedswift/swift,austinzheng/swift,codestergit/swift,nathawes/swift,jopamer/swift,gribozavr/swift,kperryua/swift,gmilos/swift,xedin/swift,glessard/swift,tinysun212/swift-windows,ben-ng/swift,practicalswift/swift,ahoppen/swift,gottesmm/swift,kperryua/swift,airspeedswift/swift,aschwaighofer/sw
ift,calebd/swift,Jnosh/swift,tkremenek/swift,xwu/swift,djwbrown/swift,roambotics/swift,gribozavr/swift,allevato/swift,xedin/swift,xwu/swift,benlangmuir/swift,gottesmm/swift,tkremenek/swift,brentdax/swift,OscarSwanros/swift,frootloops/swift,gottesmm/swift,Jnosh/swift,sschiau/swift,harlanhaskins/swift,CodaFi/swift,zisko/swift,therealbnut/swift,IngmarStein/swift,jckarter/swift,return/swift,deyton/swift,atrick/swift,parkera/swift,parkera/swift,therealbnut/swift,stephentyrone/swift,apple/swift,JaSpa/swift,uasys/swift,karwa/swift,JaSpa/swift,hooman/swift,tjw/swift,lorentey/swift,OscarSwanros/swift,shajrawi/swift,tinysun212/swift-windows,aschwaighofer/swift,apple/swift,devincoughlin/swift,natecook1000/swift,allevato/swift,natecook1000/swift,roambotics/swift,devincoughlin/swift,gmilos/swift,arvedviehweger/swift,danielmartin/swift,kperryua/swift,aschwaighofer/swift,lorentey/swift,lorentey/swift,amraboelela/swift,natecook1000/swift,return/swift,bitjammer/swift,aschwaighofer/swift,uasys/swift,xedin/swift,karwa/swift,gmilos/swift,kstaring/swift,gribozavr/swift,harlanhaskins/swift,deyton/swift,frootloops/swift,jckarter/swift,sschiau/swift,hooman/swift,xedin/swift,apple/swift,milseman/swift,glessard/swift,CodaFi/swift,felix91gr/swift,hooman/swift,harlanhaskins/swift,felix91gr/swift,gribozavr/swift,amraboelela/swift,karwa/swift,jmgc/swift,Jnosh/swift,gribozavr/swift,ben-ng/swift,jtbandes/swift,alblue/swift,tjw/swift,CodaFi/swift,benlangmuir/swift,devincoughlin/swift,swiftix/swift,gregomni/swift,lorentey/swift,ahoppen/swift,nathawes/swift,airspeedswift/swift,kstaring/swift,rudkx/swift,shajrawi/swift,OscarSwanros/swift,tinysun212/swift-windows,danielmartin/swift,huonw/swift,huonw/swift,manavgabhawala/swift,jopamer/swift,ahoppen/swift,Jnosh/swift,karwa/swift,shajrawi/swift,jmgc/swift,aschwaighofer/swift,sschiau/swift,alblue/swift,tardieu/swift,gmilos/swift,airspeedswift/swift,airspeedswift/swift,xedin/swift,felix91gr/swift,tjw/swift,xedin/swift,ben-ng/swift,aschwaighofer/swift,gottes
mm/swift,frootloops/swift,gmilos/swift,calebd/swift,Jnosh/swift,stephentyrone/swift,apple/swift,zisko/swift,IngmarStein/swift,stephentyrone/swift,jmgc/swift,swiftix/swift,JGiola/swift,tardieu/swift,lorentey/swift,shahmishal/swift,djwbrown/swift,tardieu/swift,felix91gr/swift,JaSpa/swift,arvedviehweger/swift,jopamer/swift,manavgabhawala/swift,jopamer/swift,nathawes/swift,atrick/swift,tjw/swift,natecook1000/swift,Jnosh/swift,karwa/swift,modocache/swift,ahoppen/swift,uasys/swift,rudkx/swift,parkera/swift,modocache/swift,shahmishal/swift,allevato/swift,jmgc/swift,xedin/swift,jckarter/swift,stephentyrone/swift,danielmartin/swift,hughbe/swift,hooman/swift,airspeedswift/swift,swiftix/swift,practicalswift/swift,deyton/swift,gmilos/swift,devincoughlin/swift,arvedviehweger/swift,roambotics/swift,JaSpa/swift,gregomni/swift,kstaring/swift,glessard/swift,shajrawi/swift,roambotics/swift,milseman/swift,atrick/swift,amraboelela/swift,manavgabhawala/swift,hooman/swift,hughbe/swift,therealbnut/swift,huonw/swift,hughbe/swift,bitjammer/swift,therealbnut/swift,apple/swift,amraboelela/swift,devincoughlin/swift,rudkx/swift,modocache/swift,djwbrown/swift,glessard/swift,karwa/swift,uasys/swift,hooman/swift,gregomni/swift,benlangmuir/swift,hughbe/swift,shajrawi/swift,djwbrown/swift,tinysun212/swift-windows,IngmarStein/swift,amraboelela/swift,zisko/swift,deyton/swift,calebd/swift,jmgc/swift,zisko/swift,practicalswift/swift,IngmarStein/swift,tjw/swift
|
# swift_build_support/products/swift.py -------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import product
class Swift(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add any runtime sanitizer arguments.
self.cmake_options.extend(self._compute_runtime_use_sanitizer())
def _compute_runtime_use_sanitizer(self):
sanitizer_list = []
if self.args.enable_tsan_runtime:
sanitizer_list += ['Thread']
if len(sanitizer_list) == 0:
return []
return ["-DSWIFT_RUNTIME_USE_SANITIZERS=%s" %
";".join(sanitizer_list)]
[vacation-gardening] Change method _compute_runtime_use_sanitizer => property _runtime_sanitizer_flags. NFC.
|
# swift_build_support/products/swift.py -------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import product
class Swift(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add any runtime sanitizer arguments.
self.cmake_options.extend(self._runtime_sanitizer_flags)
@property
def _runtime_sanitizer_flags(self):
sanitizer_list = []
if self.args.enable_tsan_runtime:
sanitizer_list += ['Thread']
if len(sanitizer_list) == 0:
return []
return ["-DSWIFT_RUNTIME_USE_SANITIZERS=%s" %
";".join(sanitizer_list)]
|
<commit_before># swift_build_support/products/swift.py -------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import product
class Swift(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add any runtime sanitizer arguments.
self.cmake_options.extend(self._compute_runtime_use_sanitizer())
def _compute_runtime_use_sanitizer(self):
sanitizer_list = []
if self.args.enable_tsan_runtime:
sanitizer_list += ['Thread']
if len(sanitizer_list) == 0:
return []
return ["-DSWIFT_RUNTIME_USE_SANITIZERS=%s" %
";".join(sanitizer_list)]
<commit_msg>[vacation-gardening] Change method _compute_runtime_use_sanitizer => property _runtime_sanitizer_flags. NFC.<commit_after>
|
# swift_build_support/products/swift.py -------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import product
class Swift(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add any runtime sanitizer arguments.
self.cmake_options.extend(self._runtime_sanitizer_flags)
@property
def _runtime_sanitizer_flags(self):
sanitizer_list = []
if self.args.enable_tsan_runtime:
sanitizer_list += ['Thread']
if len(sanitizer_list) == 0:
return []
return ["-DSWIFT_RUNTIME_USE_SANITIZERS=%s" %
";".join(sanitizer_list)]
|
# swift_build_support/products/swift.py -------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import product
class Swift(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add any runtime sanitizer arguments.
self.cmake_options.extend(self._compute_runtime_use_sanitizer())
def _compute_runtime_use_sanitizer(self):
sanitizer_list = []
if self.args.enable_tsan_runtime:
sanitizer_list += ['Thread']
if len(sanitizer_list) == 0:
return []
return ["-DSWIFT_RUNTIME_USE_SANITIZERS=%s" %
";".join(sanitizer_list)]
[vacation-gardening] Change method _compute_runtime_use_sanitizer => property _runtime_sanitizer_flags. NFC.# swift_build_support/products/swift.py -------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import product
class Swift(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add any runtime sanitizer arguments.
self.cmake_options.extend(self._runtime_sanitizer_flags)
@property
def _runtime_sanitizer_flags(self):
sanitizer_list = []
if self.args.enable_tsan_runtime:
sanitizer_list += ['Thread']
if len(sanitizer_list) == 0:
return []
return ["-DSWIFT_RUNTIME_USE_SANITIZERS=%s" %
";".join(sanitizer_list)]
|
<commit_before># swift_build_support/products/swift.py -------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import product
class Swift(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add any runtime sanitizer arguments.
self.cmake_options.extend(self._compute_runtime_use_sanitizer())
def _compute_runtime_use_sanitizer(self):
sanitizer_list = []
if self.args.enable_tsan_runtime:
sanitizer_list += ['Thread']
if len(sanitizer_list) == 0:
return []
return ["-DSWIFT_RUNTIME_USE_SANITIZERS=%s" %
";".join(sanitizer_list)]
<commit_msg>[vacation-gardening] Change method _compute_runtime_use_sanitizer => property _runtime_sanitizer_flags. NFC.<commit_after># swift_build_support/products/swift.py -------------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from . import product
class Swift(product.Product):
def __init__(self, args, toolchain, source_dir, build_dir):
product.Product.__init__(self, args, toolchain, source_dir,
build_dir)
# Add any runtime sanitizer arguments.
self.cmake_options.extend(self._runtime_sanitizer_flags)
@property
def _runtime_sanitizer_flags(self):
sanitizer_list = []
if self.args.enable_tsan_runtime:
sanitizer_list += ['Thread']
if len(sanitizer_list) == 0:
return []
return ["-DSWIFT_RUNTIME_USE_SANITIZERS=%s" %
";".join(sanitizer_list)]
|
d4c7648b5f77531f821b0a2a728098ae352ce0cb
|
saleor/product/management/commands/populatedb.py
|
saleor/product/management/commands/populatedb.py
|
from django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'],
is_active=True, is_staff=True, is_superuser=True)
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
|
from django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'], defaults={
'is_active': True, 'is_staff': True, 'is_superuser': True})
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
|
Check existence of the user with appropriate email address
|
Check existence of the user with appropriate email address
|
Python
|
bsd-3-clause
|
laosunhust/saleor,KenMutemi/saleor,rchav/vinerack,maferelo/saleor,UITools/saleor,HyperManTT/ECommerceSaleor,laosunhust/saleor,spartonia/saleor,car3oon/saleor,spartonia/saleor,car3oon/saleor,tfroehlich82/saleor,UITools/saleor,spartonia/saleor,tfroehlich82/saleor,car3oon/saleor,rodrigozn/CW-Shop,jreigel/saleor,itbabu/saleor,KenMutemi/saleor,jreigel/saleor,tfroehlich82/saleor,maferelo/saleor,itbabu/saleor,spartonia/saleor,rodrigozn/CW-Shop,rchav/vinerack,mociepka/saleor,jreigel/saleor,UITools/saleor,rchav/vinerack,HyperManTT/ECommerceSaleor,HyperManTT/ECommerceSaleor,maferelo/saleor,itbabu/saleor,mociepka/saleor,laosunhust/saleor,UITools/saleor,rodrigozn/CW-Shop,laosunhust/saleor,KenMutemi/saleor,mociepka/saleor,UITools/saleor
|
from django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'],
is_active=True, is_staff=True, is_superuser=True)
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
Check existence of the user with appropriate email address
|
from django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'], defaults={
'is_active': True, 'is_staff': True, 'is_superuser': True})
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
|
<commit_before>from django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'],
is_active=True, is_staff=True, is_superuser=True)
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
<commit_msg>Check existence of the user with appropriate email address<commit_after>
|
from django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'], defaults={
'is_active': True, 'is_staff': True, 'is_superuser': True})
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
|
from django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'],
is_active=True, is_staff=True, is_superuser=True)
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
Check existence of the user with appropriate email addressfrom django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'], defaults={
'is_active': True, 'is_staff': True, 'is_superuser': True})
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
|
<commit_before>from django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'],
is_active=True, is_staff=True, is_superuser=True)
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
<commit_msg>Check existence of the user with appropriate email address<commit_after>from django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'], defaults={
'is_active': True, 'is_staff': True, 'is_superuser': True})
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
|
9c25a37d4d2bc7459835dab923ad5610470c3e5a
|
lbrynet/__init__.py
|
lbrynet/__init__.py
|
import logging
__version__ = "0.20.0rc8"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
import logging
__version__ = "0.20.0rc9"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
Bump version 0.20.0rc8 --> 0.20.0rc9
|
Bump version 0.20.0rc8 --> 0.20.0rc9
Signed-off-by: Jack Robison <40884020c67726395ea162083a125620dc32cdab@lbry.io>
|
Python
|
mit
|
lbryio/lbry,lbryio/lbry,lbryio/lbry
|
import logging
__version__ = "0.20.0rc8"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
Bump version 0.20.0rc8 --> 0.20.0rc9
Signed-off-by: Jack Robison <40884020c67726395ea162083a125620dc32cdab@lbry.io>
|
import logging
__version__ = "0.20.0rc9"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
<commit_before>import logging
__version__ = "0.20.0rc8"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
<commit_msg>Bump version 0.20.0rc8 --> 0.20.0rc9
Signed-off-by: Jack Robison <40884020c67726395ea162083a125620dc32cdab@lbry.io><commit_after>
|
import logging
__version__ = "0.20.0rc9"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
import logging
__version__ = "0.20.0rc8"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
Bump version 0.20.0rc8 --> 0.20.0rc9
Signed-off-by: Jack Robison <40884020c67726395ea162083a125620dc32cdab@lbry.io>import logging
__version__ = "0.20.0rc9"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
<commit_before>import logging
__version__ = "0.20.0rc8"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
<commit_msg>Bump version 0.20.0rc8 --> 0.20.0rc9
Signed-off-by: Jack Robison <40884020c67726395ea162083a125620dc32cdab@lbry.io><commit_after>import logging
__version__ = "0.20.0rc9"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
d2fffc5e206a3305f98c0c9a4f2527b868e93eb3
|
lexicon/__init__.py
|
lexicon/__init__.py
|
from attribute_dict import AttributeDict
from alias_dict import AliasDict
__version__ = "0.1.0"
class Lexicon(AttributeDict, AliasDict):
def __init__(self, *args, **kwargs):
# Need to avoid combining AliasDict's initial attribute write on
# self.aliases, with AttributeDict's __setattr__. Doing so results in
# an infinite loop. Instead, just skip straight to dict() for both
# explicitly (i.e. we override AliasDict.__init__ instead of extending
# it.)
# NOTE: could tickle AttributeDict.__init__ instead, in case it ever
# grows one.
dict.__init__(self, *args, **kwargs)
dict.__setattr__(self, 'aliases', {})
|
from attribute_dict import AttributeDict
from alias_dict import AliasDict
__version__ = "0.1.0"
class Lexicon(AttributeDict, AliasDict):
def __init__(self, *args, **kwargs):
# Need to avoid combining AliasDict's initial attribute write on
# self.aliases, with AttributeDict's __setattr__. Doing so results in
# an infinite loop. Instead, just skip straight to dict() for both
# explicitly (i.e. we override AliasDict.__init__ instead of extending
# it.)
# NOTE: could tickle AttributeDict.__init__ instead, in case it ever
# grows one.
dict.__init__(self, *args, **kwargs)
dict.__setattr__(self, 'aliases', {})
def __getattr__(self, key):
# Intercept deepcopy/etc driven access to self.aliases when not
# actually set. (Only a problem for us, due to abovementioned combo of
# Alias and Attribute Dicts, so not solvable in a parent alone.)
if key == 'aliases' and key not in self.__dict__:
self.__dict__[key] = {}
return super(Lexicon, self).__getattr__(key)
|
Fix problems using Lexicons in deepcopy'd objects.
|
Fix problems using Lexicons in deepcopy'd objects.
Said problems actually only manifest as 'ignored'
RuntimeErrors, but those are really annoying and hard
to hide. This seems to be the right fix.
|
Python
|
bsd-2-clause
|
mindw/lexicon,bitprophet/lexicon
|
from attribute_dict import AttributeDict
from alias_dict import AliasDict
__version__ = "0.1.0"
class Lexicon(AttributeDict, AliasDict):
def __init__(self, *args, **kwargs):
# Need to avoid combining AliasDict's initial attribute write on
# self.aliases, with AttributeDict's __setattr__. Doing so results in
# an infinite loop. Instead, just skip straight to dict() for both
# explicitly (i.e. we override AliasDict.__init__ instead of extending
# it.)
# NOTE: could tickle AttributeDict.__init__ instead, in case it ever
# grows one.
dict.__init__(self, *args, **kwargs)
dict.__setattr__(self, 'aliases', {})
Fix problems using Lexicons in deepcopy'd objects.
Said problems actually only manifest as 'ignored'
RuntimeErrors, but those are really annoying and hard
to hide. This seems to be the right fix.
|
from attribute_dict import AttributeDict
from alias_dict import AliasDict
__version__ = "0.1.0"
class Lexicon(AttributeDict, AliasDict):
def __init__(self, *args, **kwargs):
# Need to avoid combining AliasDict's initial attribute write on
# self.aliases, with AttributeDict's __setattr__. Doing so results in
# an infinite loop. Instead, just skip straight to dict() for both
# explicitly (i.e. we override AliasDict.__init__ instead of extending
# it.)
# NOTE: could tickle AttributeDict.__init__ instead, in case it ever
# grows one.
dict.__init__(self, *args, **kwargs)
dict.__setattr__(self, 'aliases', {})
def __getattr__(self, key):
# Intercept deepcopy/etc driven access to self.aliases when not
# actually set. (Only a problem for us, due to abovementioned combo of
# Alias and Attribute Dicts, so not solvable in a parent alone.)
if key == 'aliases' and key not in self.__dict__:
self.__dict__[key] = {}
return super(Lexicon, self).__getattr__(key)
|
<commit_before>from attribute_dict import AttributeDict
from alias_dict import AliasDict
__version__ = "0.1.0"
class Lexicon(AttributeDict, AliasDict):
def __init__(self, *args, **kwargs):
# Need to avoid combining AliasDict's initial attribute write on
# self.aliases, with AttributeDict's __setattr__. Doing so results in
# an infinite loop. Instead, just skip straight to dict() for both
# explicitly (i.e. we override AliasDict.__init__ instead of extending
# it.)
# NOTE: could tickle AttributeDict.__init__ instead, in case it ever
# grows one.
dict.__init__(self, *args, **kwargs)
dict.__setattr__(self, 'aliases', {})
<commit_msg>Fix problems using Lexicons in deepcopy'd objects.
Said problems actually only manifest as 'ignored'
RuntimeErrors, but those are really annoying and hard
to hide. This seems to be the right fix.<commit_after>
|
from attribute_dict import AttributeDict
from alias_dict import AliasDict
__version__ = "0.1.0"
class Lexicon(AttributeDict, AliasDict):
def __init__(self, *args, **kwargs):
# Need to avoid combining AliasDict's initial attribute write on
# self.aliases, with AttributeDict's __setattr__. Doing so results in
# an infinite loop. Instead, just skip straight to dict() for both
# explicitly (i.e. we override AliasDict.__init__ instead of extending
# it.)
# NOTE: could tickle AttributeDict.__init__ instead, in case it ever
# grows one.
dict.__init__(self, *args, **kwargs)
dict.__setattr__(self, 'aliases', {})
def __getattr__(self, key):
# Intercept deepcopy/etc driven access to self.aliases when not
# actually set. (Only a problem for us, due to abovementioned combo of
# Alias and Attribute Dicts, so not solvable in a parent alone.)
if key == 'aliases' and key not in self.__dict__:
self.__dict__[key] = {}
return super(Lexicon, self).__getattr__(key)
|
from attribute_dict import AttributeDict
from alias_dict import AliasDict
__version__ = "0.1.0"
class Lexicon(AttributeDict, AliasDict):
def __init__(self, *args, **kwargs):
# Need to avoid combining AliasDict's initial attribute write on
# self.aliases, with AttributeDict's __setattr__. Doing so results in
# an infinite loop. Instead, just skip straight to dict() for both
# explicitly (i.e. we override AliasDict.__init__ instead of extending
# it.)
# NOTE: could tickle AttributeDict.__init__ instead, in case it ever
# grows one.
dict.__init__(self, *args, **kwargs)
dict.__setattr__(self, 'aliases', {})
Fix problems using Lexicons in deepcopy'd objects.
Said problems actually only manifest as 'ignored'
RuntimeErrors, but those are really annoying and hard
to hide. This seems to be the right fix.from attribute_dict import AttributeDict
from alias_dict import AliasDict
__version__ = "0.1.0"
class Lexicon(AttributeDict, AliasDict):
def __init__(self, *args, **kwargs):
# Need to avoid combining AliasDict's initial attribute write on
# self.aliases, with AttributeDict's __setattr__. Doing so results in
# an infinite loop. Instead, just skip straight to dict() for both
# explicitly (i.e. we override AliasDict.__init__ instead of extending
# it.)
# NOTE: could tickle AttributeDict.__init__ instead, in case it ever
# grows one.
dict.__init__(self, *args, **kwargs)
dict.__setattr__(self, 'aliases', {})
def __getattr__(self, key):
# Intercept deepcopy/etc driven access to self.aliases when not
# actually set. (Only a problem for us, due to abovementioned combo of
# Alias and Attribute Dicts, so not solvable in a parent alone.)
if key == 'aliases' and key not in self.__dict__:
self.__dict__[key] = {}
return super(Lexicon, self).__getattr__(key)
|
<commit_before>from attribute_dict import AttributeDict
from alias_dict import AliasDict
__version__ = "0.1.0"
class Lexicon(AttributeDict, AliasDict):
def __init__(self, *args, **kwargs):
# Need to avoid combining AliasDict's initial attribute write on
# self.aliases, with AttributeDict's __setattr__. Doing so results in
# an infinite loop. Instead, just skip straight to dict() for both
# explicitly (i.e. we override AliasDict.__init__ instead of extending
# it.)
# NOTE: could tickle AttributeDict.__init__ instead, in case it ever
# grows one.
dict.__init__(self, *args, **kwargs)
dict.__setattr__(self, 'aliases', {})
<commit_msg>Fix problems using Lexicons in deepcopy'd objects.
Said problems actually only manifest as 'ignored'
RuntimeErrors, but those are really annoying and hard
to hide. This seems to be the right fix.<commit_after>from attribute_dict import AttributeDict
from alias_dict import AliasDict
__version__ = "0.1.0"
class Lexicon(AttributeDict, AliasDict):
def __init__(self, *args, **kwargs):
# Need to avoid combining AliasDict's initial attribute write on
# self.aliases, with AttributeDict's __setattr__. Doing so results in
# an infinite loop. Instead, just skip straight to dict() for both
# explicitly (i.e. we override AliasDict.__init__ instead of extending
# it.)
# NOTE: could tickle AttributeDict.__init__ instead, in case it ever
# grows one.
dict.__init__(self, *args, **kwargs)
dict.__setattr__(self, 'aliases', {})
def __getattr__(self, key):
# Intercept deepcopy/etc driven access to self.aliases when not
# actually set. (Only a problem for us, due to abovementioned combo of
# Alias and Attribute Dicts, so not solvable in a parent alone.)
if key == 'aliases' and key not in self.__dict__:
self.__dict__[key] = {}
return super(Lexicon, self).__getattr__(key)
|
aa8fb3c94dbbb7cae9b13d4441c59a7607b84583
|
cloudshell/networking/networking_resource_driver_interface.py
|
cloudshell/networking/networking_resource_driver_interface.py
|
from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def update_firmware(self, context, remote_host, file_path):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
|
from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
|
Modify networking resource driver interface according to the latest networking standard
|
Modify networking resource driver interface according to the latest networking standard
|
Python
|
apache-2.0
|
QualiSystems/cloudshell-networking,QualiSystems/CloudShell-Networking-Core
|
from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def update_firmware(self, context, remote_host, file_path):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
Modify networking resource driver interface according to the latest networking standard
|
from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
|
<commit_before>from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def update_firmware(self, context, remote_host, file_path):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
<commit_msg>Modify networking resource driver interface according to the latest networking standard<commit_after>
|
from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
|
from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def update_firmware(self, context, remote_host, file_path):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
Modify networking resource driver interface according to the latest networking standardfrom abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
|
<commit_before>from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_command(self, context, custom_command):
pass
@abstractmethod
def send_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def update_firmware(self, context, remote_host, file_path):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
<commit_msg>Modify networking resource driver interface according to the latest networking standard<commit_after>from abc import ABCMeta
from abc import abstractmethod
class NetworkingResourceDriverInterface(object):
__metaclass__ = ABCMeta
@abstractmethod
def ApplyConnectivityChanges(self, context, request):
pass
@abstractmethod
def run_custom_command(self, context, custom_command):
pass
@abstractmethod
def run_custom_config_command(self, context, custom_command):
pass
@abstractmethod
def save(self, context, folder_path, configuration_type, vrf_management_name):
pass
@abstractmethod
def restore(self, context, path, configuration_type, restore_method, vrf_management_name):
pass
@abstractmethod
def get_inventory(self, context):
pass
@abstractmethod
def orchestration_restore(self, context, saved_artifact_info, custom_params):
pass
@abstractmethod
def orchestration_save(self, context, mode, custom_params):
pass
@abstractmethod
def health_check(self, context):
pass
@abstractmethod
def load_firmware(self, context, path, vrf_management_name):
pass
@abstractmethod
def shutdown(self, context):
pass
|
1ea27e8989657bb35dd37b6ee2e038e1358fbc96
|
social_core/backends/globus.py
|
social_core/backends/globus.py
|
"""
Globus Auth OpenID Connect backend, docs at:
https://docs.globus.org/api/auth
http://globus-integration-examples.readthedocs.io
"""
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
|
"""
Globus Auth OpenID Connect backend, docs at:
https://docs.globus.org/api/auth
http://globus-integration-examples.readthedocs.io
"""
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
JWT_ALGORITHMS = ['RS256', 'RS512']
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
|
Set a JWT signature algorithm for the Globus backend to RS512
|
Set a JWT signature algorithm for the Globus backend to RS512
|
Python
|
bsd-3-clause
|
python-social-auth/social-core,python-social-auth/social-core
|
"""
Globus Auth OpenID Connect backend, docs at:
https://docs.globus.org/api/auth
http://globus-integration-examples.readthedocs.io
"""
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
Set a JWT signature algorithm for the Globus backend to RS512
|
"""
Globus Auth OpenID Connect backend, docs at:
https://docs.globus.org/api/auth
http://globus-integration-examples.readthedocs.io
"""
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
JWT_ALGORITHMS = ['RS256', 'RS512']
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
|
<commit_before>"""
Globus Auth OpenID Connect backend, docs at:
https://docs.globus.org/api/auth
http://globus-integration-examples.readthedocs.io
"""
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
<commit_msg>Set a JWT signature algorithm for the Globus backend to RS512<commit_after>
|
"""
Globus Auth OpenID Connect backend, docs at:
https://docs.globus.org/api/auth
http://globus-integration-examples.readthedocs.io
"""
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
JWT_ALGORITHMS = ['RS256', 'RS512']
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
|
"""
Globus Auth OpenID Connect backend, docs at:
https://docs.globus.org/api/auth
http://globus-integration-examples.readthedocs.io
"""
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
Set a JWT signature algorithm for the Globus backend to RS512"""
Globus Auth OpenID Connect backend, docs at:
https://docs.globus.org/api/auth
http://globus-integration-examples.readthedocs.io
"""
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
JWT_ALGORITHMS = ['RS256', 'RS512']
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
|
<commit_before>"""
Globus Auth OpenID Connect backend, docs at:
https://docs.globus.org/api/auth
http://globus-integration-examples.readthedocs.io
"""
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
<commit_msg>Set a JWT signature algorithm for the Globus backend to RS512<commit_after>"""
Globus Auth OpenID Connect backend, docs at:
https://docs.globus.org/api/auth
http://globus-integration-examples.readthedocs.io
"""
from social_core.backends.open_id_connect import OpenIdConnectAuth
class GlobusOpenIdConnect(OpenIdConnectAuth):
name = 'globus'
OIDC_ENDPOINT = 'https://auth.globus.org'
JWT_ALGORITHMS = ['RS256', 'RS512']
EXTRA_DATA = [
('expires_in', 'expires_in', True),
('refresh_token', 'refresh_token', True),
('id_token', 'id_token', True),
('other_tokens', 'other_tokens', True),
]
def get_user_details(self, response):
username_key = self.setting('USERNAME_KEY', default=self.USERNAME_KEY)
name = response.get('name') or ''
fullname, first_name, last_name = self.get_user_names(name)
return {'username': response.get(username_key),
'email': response.get('email'),
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
|
1ba67d0288fbe2800e5aed685580bdc37383a61c
|
knowledge_repo/postprocessors/format_checks.py
|
knowledge_repo/postprocessors/format_checks.py
|
from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, _ in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, \
"Required field `{field}` missing from headers."
for field, typ, _ in \
HEADER_REQUIRED_FIELD_TYPES + HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
header_field = headers[field]
assert isinstance(header_field, typ), \
f"Value for field `{field}` is of type " + \
f"{type(header_field)}, and needs to be of type {typ}."
|
from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, "Required field `{}` missing from headers.".format(
field)
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
|
Revert the latest changes due to CI build failure
|
Revert the latest changes due to CI build failure
|
Python
|
apache-2.0
|
airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo
|
from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, _ in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, \
"Required field `{field}` missing from headers."
for field, typ, _ in \
HEADER_REQUIRED_FIELD_TYPES + HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
header_field = headers[field]
assert isinstance(header_field, typ), \
f"Value for field `{field}` is of type " + \
f"{type(header_field)}, and needs to be of type {typ}."
Revert the latest changes due to CI build failure
|
from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, "Required field `{}` missing from headers.".format(
field)
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
|
<commit_before>from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, _ in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, \
"Required field `{field}` missing from headers."
for field, typ, _ in \
HEADER_REQUIRED_FIELD_TYPES + HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
header_field = headers[field]
assert isinstance(header_field, typ), \
f"Value for field `{field}` is of type " + \
f"{type(header_field)}, and needs to be of type {typ}."
<commit_msg>Revert the latest changes due to CI build failure<commit_after>
|
from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, "Required field `{}` missing from headers.".format(
field)
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
|
from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, _ in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, \
"Required field `{field}` missing from headers."
for field, typ, _ in \
HEADER_REQUIRED_FIELD_TYPES + HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
header_field = headers[field]
assert isinstance(header_field, typ), \
f"Value for field `{field}` is of type " + \
f"{type(header_field)}, and needs to be of type {typ}."
Revert the latest changes due to CI build failurefrom ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, "Required field `{}` missing from headers.".format(
field)
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
|
<commit_before>from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, _ in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, \
"Required field `{field}` missing from headers."
for field, typ, _ in \
HEADER_REQUIRED_FIELD_TYPES + HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
header_field = headers[field]
assert isinstance(header_field, typ), \
f"Value for field `{field}` is of type " + \
f"{type(header_field)}, and needs to be of type {typ}."
<commit_msg>Revert the latest changes due to CI build failure<commit_after>from ..constants import FORMAT_CHECKS
from ..post import HEADER_OPTIONAL_FIELD_TYPES, HEADER_REQUIRED_FIELD_TYPES
from ..postprocessor import KnowledgePostProcessor
class FormatChecks(KnowledgePostProcessor):
_registry_keys = [FORMAT_CHECKS]
def process(self, kp):
headers = kp.headers
for field, typ, input in HEADER_REQUIRED_FIELD_TYPES:
assert field in headers, "Required field `{}` missing from headers.".format(
field)
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
for field, typ, input in HEADER_OPTIONAL_FIELD_TYPES:
if field in headers:
assert isinstance(headers[field], typ), "Value for field `{}` is of type {}, and needs to be of type {}.".format(
field, type(headers[field]), typ)
|
0c1caf49a18bcd862247cdca7a4efe2f6fc02d93
|
wafer/management/commands/wafer_talk_video_reviewers.py
|
wafer/management/commands/wafer_talk_video_reviewers.py
|
import sys
import csv
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import Talk, ACCEPTED, PROVISIONAL
class Command(BaseCommand):
help = ("List talks and the associated video_reviewer emails."
" Only reviewers for accepted talks are listed")
def _video_reviewers(self, options):
talks = Talk.objects.filter(status=ACCEPTED)
csv_file = csv.writer(sys.stdout)
for talk in talks:
reviewer = talk.video_reviewer
if not reviewer:
reviewer = 'NO REVIEWER'
row = [x.encode("utf-8") for x in (
talk.title,
talk.get_authors_display_name(),
reviewer,
)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._video_reviewers(options)
|
import sys
import csv
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import Talk, ACCEPTED, PROVISIONAL
class Command(BaseCommand):
help = ("List talks and the associated video_reviewer emails."
" Only reviewers for accepted talks are listed")
def _video_reviewers(self, options):
talks = Talk.objects.filter(status=ACCEPTED)
csv_file = csv.writer(sys.stdout)
for talk in talks:
reviewer = talk.video_reviewer
if not reviewer:
reviewer = 'NO REVIEWER'
row = [talk.title,
talk.get_authors_display_name(),
reviewer,
]
csv_file.writerow(row)
def handle(self, *args, **options):
self._video_reviewers(options)
|
Drop python2-era manual encode dance
|
Drop python2-era manual encode dance
|
Python
|
isc
|
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
|
import sys
import csv
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import Talk, ACCEPTED, PROVISIONAL
class Command(BaseCommand):
help = ("List talks and the associated video_reviewer emails."
" Only reviewers for accepted talks are listed")
def _video_reviewers(self, options):
talks = Talk.objects.filter(status=ACCEPTED)
csv_file = csv.writer(sys.stdout)
for talk in talks:
reviewer = talk.video_reviewer
if not reviewer:
reviewer = 'NO REVIEWER'
row = [x.encode("utf-8") for x in (
talk.title,
talk.get_authors_display_name(),
reviewer,
)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._video_reviewers(options)
Drop python2-era manual encode dance
|
import sys
import csv
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import Talk, ACCEPTED, PROVISIONAL
class Command(BaseCommand):
help = ("List talks and the associated video_reviewer emails."
" Only reviewers for accepted talks are listed")
def _video_reviewers(self, options):
talks = Talk.objects.filter(status=ACCEPTED)
csv_file = csv.writer(sys.stdout)
for talk in talks:
reviewer = talk.video_reviewer
if not reviewer:
reviewer = 'NO REVIEWER'
row = [talk.title,
talk.get_authors_display_name(),
reviewer,
]
csv_file.writerow(row)
def handle(self, *args, **options):
self._video_reviewers(options)
|
<commit_before>import sys
import csv
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import Talk, ACCEPTED, PROVISIONAL
class Command(BaseCommand):
help = ("List talks and the associated video_reviewer emails."
" Only reviewers for accepted talks are listed")
def _video_reviewers(self, options):
talks = Talk.objects.filter(status=ACCEPTED)
csv_file = csv.writer(sys.stdout)
for talk in talks:
reviewer = talk.video_reviewer
if not reviewer:
reviewer = 'NO REVIEWER'
row = [x.encode("utf-8") for x in (
talk.title,
talk.get_authors_display_name(),
reviewer,
)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._video_reviewers(options)
<commit_msg>Drop python2-era manual encode dance<commit_after>
|
import sys
import csv
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import Talk, ACCEPTED, PROVISIONAL
class Command(BaseCommand):
help = ("List talks and the associated video_reviewer emails."
" Only reviewers for accepted talks are listed")
def _video_reviewers(self, options):
talks = Talk.objects.filter(status=ACCEPTED)
csv_file = csv.writer(sys.stdout)
for talk in talks:
reviewer = talk.video_reviewer
if not reviewer:
reviewer = 'NO REVIEWER'
row = [talk.title,
talk.get_authors_display_name(),
reviewer,
]
csv_file.writerow(row)
def handle(self, *args, **options):
self._video_reviewers(options)
|
import sys
import csv
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import Talk, ACCEPTED, PROVISIONAL
class Command(BaseCommand):
help = ("List talks and the associated video_reviewer emails."
" Only reviewers for accepted talks are listed")
def _video_reviewers(self, options):
talks = Talk.objects.filter(status=ACCEPTED)
csv_file = csv.writer(sys.stdout)
for talk in talks:
reviewer = talk.video_reviewer
if not reviewer:
reviewer = 'NO REVIEWER'
row = [x.encode("utf-8") for x in (
talk.title,
talk.get_authors_display_name(),
reviewer,
)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._video_reviewers(options)
Drop python2-era manual encode danceimport sys
import csv
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import Talk, ACCEPTED, PROVISIONAL
class Command(BaseCommand):
help = ("List talks and the associated video_reviewer emails."
" Only reviewers for accepted talks are listed")
def _video_reviewers(self, options):
talks = Talk.objects.filter(status=ACCEPTED)
csv_file = csv.writer(sys.stdout)
for talk in talks:
reviewer = talk.video_reviewer
if not reviewer:
reviewer = 'NO REVIEWER'
row = [talk.title,
talk.get_authors_display_name(),
reviewer,
]
csv_file.writerow(row)
def handle(self, *args, **options):
self._video_reviewers(options)
|
<commit_before>import sys
import csv
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import Talk, ACCEPTED, PROVISIONAL
class Command(BaseCommand):
help = ("List talks and the associated video_reviewer emails."
" Only reviewers for accepted talks are listed")
def _video_reviewers(self, options):
talks = Talk.objects.filter(status=ACCEPTED)
csv_file = csv.writer(sys.stdout)
for talk in talks:
reviewer = talk.video_reviewer
if not reviewer:
reviewer = 'NO REVIEWER'
row = [x.encode("utf-8") for x in (
talk.title,
talk.get_authors_display_name(),
reviewer,
)]
csv_file.writerow(row)
def handle(self, *args, **options):
self._video_reviewers(options)
<commit_msg>Drop python2-era manual encode dance<commit_after>import sys
import csv
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
from wafer.talks.models import Talk, ACCEPTED, PROVISIONAL
class Command(BaseCommand):
help = ("List talks and the associated video_reviewer emails."
" Only reviewers for accepted talks are listed")
def _video_reviewers(self, options):
talks = Talk.objects.filter(status=ACCEPTED)
csv_file = csv.writer(sys.stdout)
for talk in talks:
reviewer = talk.video_reviewer
if not reviewer:
reviewer = 'NO REVIEWER'
row = [talk.title,
talk.get_authors_display_name(),
reviewer,
]
csv_file.writerow(row)
def handle(self, *args, **options):
self._video_reviewers(options)
|
3b105973a6aad7885fd56182ad32e2731de9a432
|
django_evolution/compat/patches/sqlite_legacy_alter_table.py
|
django_evolution/compat/patches/sqlite_legacy_alter_table.py
|
"""Patch to enable SQLite Legacy Alter Table support."""
from __future__ import unicode_literals
import sqlite3
import django
from django.db.backends.sqlite3.base import DatabaseWrapper
def needs_patch():
"""Return whether the SQLite backend needs patching.
It will need patching if using Django 1.11 through 2.1.4 while using
SQLite3 v2.26.
Returns:
bool:
``True`` if the backend needs to be patched. ``False`` if it does not.
"""
return (sqlite3.sqlite_version_info > (2, 26, 0) and
(1, 11) <= django.VERSION < (2, 1, 5))
def apply_patch():
"""Apply a patch to the SQLite database backend.
This will turn on SQLite's ``legacy_alter_table`` mode on when modifying
the schema, which is needed in order to successfully allow Django to make
table modifications.
"""
class DatabaseSchemaEditor(DatabaseWrapper.SchemaEditorClass):
def __enter__(self):
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = ON')
return super(DatabaseSchemaEditor, self).__enter__()
def __exit__(self, *args, **kwargs):
super(DatabaseSchemaEditor, self).__exit__(*args, **kwargs)
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = OFF')
DatabaseWrapper.SchemaEditorClass = DatabaseSchemaEditor
|
"""Patch to enable SQLite Legacy Alter Table support."""
from __future__ import unicode_literals
import sqlite3
import django
def needs_patch():
"""Return whether the SQLite backend needs patching.
It will need patching if using Django 1.11 through 2.1.4 while using
SQLite3 v2.26.
Returns:
bool:
``True`` if the backend needs to be patched. ``False`` if it does not.
"""
return (sqlite3.sqlite_version_info > (2, 26, 0) and
(1, 11) <= django.VERSION < (2, 1, 5))
def apply_patch():
"""Apply a patch to the SQLite database backend.
This will turn on SQLite's ``legacy_alter_table`` mode on when modifying
the schema, which is needed in order to successfully allow Django to make
table modifications.
"""
from django.db.backends.sqlite3.base import DatabaseWrapper
class DatabaseSchemaEditor(DatabaseWrapper.SchemaEditorClass):
def __enter__(self):
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = ON')
return super(DatabaseSchemaEditor, self).__enter__()
def __exit__(self, *args, **kwargs):
super(DatabaseSchemaEditor, self).__exit__(*args, **kwargs)
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = OFF')
DatabaseWrapper.SchemaEditorClass = DatabaseSchemaEditor
|
Fix a premature import when patching SQLite compatibility.
|
Fix a premature import when patching SQLite compatibility.
We provide a compatibility patch that fixes certain versions of SQLite
with Django prior to 2.1.5.
This patch made the assumption that it could import the Django SQLite
backend at the module level, since SQLite is built into Python. However,
on modern versions of Django, this will fail to import if the version of
SQLite is too old.
We now import this only if we're about to apply the patch, in which case
we've already confirmed the compatible version range.
Testing Done:
Tested on reviews.reviewboard.org, where this problem was first encountered
due to an older SQLite. We no longer hit a premature import.
Reviewed at https://reviews.reviewboard.org/r/12414/
|
Python
|
bsd-3-clause
|
beanbaginc/django-evolution
|
"""Patch to enable SQLite Legacy Alter Table support."""
from __future__ import unicode_literals
import sqlite3
import django
from django.db.backends.sqlite3.base import DatabaseWrapper
def needs_patch():
"""Return whether the SQLite backend needs patching.
It will need patching if using Django 1.11 through 2.1.4 while using
SQLite3 v2.26.
Returns:
bool:
``True`` if the backend needs to be patched. ``False`` if it does not.
"""
return (sqlite3.sqlite_version_info > (2, 26, 0) and
(1, 11) <= django.VERSION < (2, 1, 5))
def apply_patch():
    """Apply a patch to the SQLite database backend.
    This will turn on SQLite's ``legacy_alter_table`` mode on when modifying
    the schema, which is needed in order to successfully allow Django to make
    table modifications.
    """
    # Subclass whatever schema editor the backend currently uses, so the
    # PRAGMA toggling wraps every schema-editing context without otherwise
    # altering Django's behaviour.
    class DatabaseSchemaEditor(DatabaseWrapper.SchemaEditorClass):
        def __enter__(self):
            # Enable legacy ALTER TABLE semantics before any schema change.
            with self.connection.cursor() as c:
                c.execute('PRAGMA legacy_alter_table = ON')
            return super(DatabaseSchemaEditor, self).__enter__()
        def __exit__(self, *args, **kwargs):
            # Run the normal teardown first, then restore the connection's
            # default ALTER TABLE behaviour.
            super(DatabaseSchemaEditor, self).__exit__(*args, **kwargs)
            with self.connection.cursor() as c:
                c.execute('PRAGMA legacy_alter_table = OFF')
    # Install the patched editor globally on the SQLite backend.
    DatabaseWrapper.SchemaEditorClass = DatabaseSchemaEditor
Fix a premature import when patching SQLite compatibility.
We provide a compatibility patch that fixes certain versions of SQLite
with Django prior to 2.1.5.
This patch made the assumption that it could import the Django SQLite
backend at the module level, since SQLite is built into Python. However,
on modern versions of Django, this will fail to import if the version of
SQLite is too old.
We now import this only if we're about to apply the patch, in which case
we've already confirmed the compatible version range.
Testing Done:
Tested on reviews.reviewboard.org, where this problem was first encountered
due to an older SQLite. We no longer hit a premature import.
Reviewed at https://reviews.reviewboard.org/r/12414/
|
"""Patch to enable SQLite Legacy Alter Table support."""
from __future__ import unicode_literals
import sqlite3
import django
def needs_patch():
"""Return whether the SQLite backend needs patching.
It will need patching if using Django 1.11 through 2.1.4 while using
SQLite3 v2.26.
Returns:
bool:
``True`` if the backend needs to be patched. ``False`` if it does not.
"""
return (sqlite3.sqlite_version_info > (2, 26, 0) and
(1, 11) <= django.VERSION < (2, 1, 5))
def apply_patch():
    """Apply a patch to the SQLite database backend.
    This will turn on SQLite's ``legacy_alter_table`` mode on when modifying
    the schema, which is needed in order to successfully allow Django to make
    table modifications.
    """
    # Imported here rather than at module level: on modern Django this import
    # fails outright when the installed SQLite is too old, and by the time
    # apply_patch() runs the compatible version range has been confirmed.
    from django.db.backends.sqlite3.base import DatabaseWrapper
    # Subclass the active schema editor so the PRAGMA toggling wraps every
    # schema-editing context without otherwise altering Django's behaviour.
    class DatabaseSchemaEditor(DatabaseWrapper.SchemaEditorClass):
        def __enter__(self):
            # Enable legacy ALTER TABLE semantics before any schema change.
            with self.connection.cursor() as c:
                c.execute('PRAGMA legacy_alter_table = ON')
            return super(DatabaseSchemaEditor, self).__enter__()
        def __exit__(self, *args, **kwargs):
            # Run the normal teardown first, then restore the connection's
            # default ALTER TABLE behaviour.
            super(DatabaseSchemaEditor, self).__exit__(*args, **kwargs)
            with self.connection.cursor() as c:
                c.execute('PRAGMA legacy_alter_table = OFF')
    # Install the patched editor globally on the SQLite backend.
    DatabaseWrapper.SchemaEditorClass = DatabaseSchemaEditor
|
<commit_before>"""Patch to enable SQLite Legacy Alter Table support."""
from __future__ import unicode_literals
import sqlite3
import django
from django.db.backends.sqlite3.base import DatabaseWrapper
def needs_patch():
"""Return whether the SQLite backend needs patching.
It will need patching if using Django 1.11 through 2.1.4 while using
SQLite3 v2.26.
Returns:
bool:
``True`` if the backend needs to be patched. ``False`` if it does not.
"""
return (sqlite3.sqlite_version_info > (2, 26, 0) and
(1, 11) <= django.VERSION < (2, 1, 5))
def apply_patch():
"""Apply a patch to the SQLite database backend.
This will turn on SQLite's ``legacy_alter_table`` mode on when modifying
the schema, which is needed in order to successfully allow Django to make
table modifications.
"""
class DatabaseSchemaEditor(DatabaseWrapper.SchemaEditorClass):
def __enter__(self):
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = ON')
return super(DatabaseSchemaEditor, self).__enter__()
def __exit__(self, *args, **kwargs):
super(DatabaseSchemaEditor, self).__exit__(*args, **kwargs)
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = OFF')
DatabaseWrapper.SchemaEditorClass = DatabaseSchemaEditor
<commit_msg>Fix a premature import when patching SQLite compatibility.
We provide a compatibility patch that fixes certain versions of SQLite
with Django prior to 2.1.5.
This patch made the assumption that it could import the Django SQLite
backend at the module level, since SQLite is built into Python. However,
on modern versions of Django, this will fail to import if the version of
SQLite is too old.
We now import this only if we're about to apply the patch, in which case
we've already confirmed the compatible version range.
Testing Done:
Tested on reviews.reviewboard.org, where this problem was first encountered
due to an older SQLite. We no longer hit a premature import.
Reviewed at https://reviews.reviewboard.org/r/12414/<commit_after>
|
"""Patch to enable SQLite Legacy Alter Table support."""
from __future__ import unicode_literals
import sqlite3
import django
def needs_patch():
"""Return whether the SQLite backend needs patching.
It will need patching if using Django 1.11 through 2.1.4 while using
SQLite3 v2.26.
Returns:
bool:
``True`` if the backend needs to be patched. ``False`` if it does not.
"""
return (sqlite3.sqlite_version_info > (2, 26, 0) and
(1, 11) <= django.VERSION < (2, 1, 5))
def apply_patch():
"""Apply a patch to the SQLite database backend.
This will turn on SQLite's ``legacy_alter_table`` mode on when modifying
the schema, which is needed in order to successfully allow Django to make
table modifications.
"""
from django.db.backends.sqlite3.base import DatabaseWrapper
class DatabaseSchemaEditor(DatabaseWrapper.SchemaEditorClass):
def __enter__(self):
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = ON')
return super(DatabaseSchemaEditor, self).__enter__()
def __exit__(self, *args, **kwargs):
super(DatabaseSchemaEditor, self).__exit__(*args, **kwargs)
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = OFF')
DatabaseWrapper.SchemaEditorClass = DatabaseSchemaEditor
|
"""Patch to enable SQLite Legacy Alter Table support."""
from __future__ import unicode_literals
import sqlite3
import django
from django.db.backends.sqlite3.base import DatabaseWrapper
def needs_patch():
"""Return whether the SQLite backend needs patching.
It will need patching if using Django 1.11 through 2.1.4 while using
SQLite3 v2.26.
Returns:
bool:
``True`` if the backend needs to be patched. ``False`` if it does not.
"""
return (sqlite3.sqlite_version_info > (2, 26, 0) and
(1, 11) <= django.VERSION < (2, 1, 5))
def apply_patch():
"""Apply a patch to the SQLite database backend.
This will turn on SQLite's ``legacy_alter_table`` mode on when modifying
the schema, which is needed in order to successfully allow Django to make
table modifications.
"""
class DatabaseSchemaEditor(DatabaseWrapper.SchemaEditorClass):
def __enter__(self):
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = ON')
return super(DatabaseSchemaEditor, self).__enter__()
def __exit__(self, *args, **kwargs):
super(DatabaseSchemaEditor, self).__exit__(*args, **kwargs)
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = OFF')
DatabaseWrapper.SchemaEditorClass = DatabaseSchemaEditor
Fix a premature import when patching SQLite compatibility.
We provide a compatibility patch that fixes certain versions of SQLite
with Django prior to 2.1.5.
This patch made the assumption that it could import the Django SQLite
backend at the module level, since SQLite is built into Python. However,
on modern versions of Django, this will fail to import if the version of
SQLite is too old.
We now import this only if we're about to apply the patch, in which case
we've already confirmed the compatible version range.
Testing Done:
Tested on reviews.reviewboard.org, where this problem was first encountered
due to an older SQLite. We no longer hit a premature import.
Reviewed at https://reviews.reviewboard.org/r/12414/"""Patch to enable SQLite Legacy Alter Table support."""
from __future__ import unicode_literals
import sqlite3
import django
def needs_patch():
"""Return whether the SQLite backend needs patching.
It will need patching if using Django 1.11 through 2.1.4 while using
SQLite3 v2.26.
Returns:
bool:
``True`` if the backend needs to be patched. ``False`` if it does not.
"""
return (sqlite3.sqlite_version_info > (2, 26, 0) and
(1, 11) <= django.VERSION < (2, 1, 5))
def apply_patch():
"""Apply a patch to the SQLite database backend.
This will turn on SQLite's ``legacy_alter_table`` mode on when modifying
the schema, which is needed in order to successfully allow Django to make
table modifications.
"""
from django.db.backends.sqlite3.base import DatabaseWrapper
class DatabaseSchemaEditor(DatabaseWrapper.SchemaEditorClass):
def __enter__(self):
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = ON')
return super(DatabaseSchemaEditor, self).__enter__()
def __exit__(self, *args, **kwargs):
super(DatabaseSchemaEditor, self).__exit__(*args, **kwargs)
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = OFF')
DatabaseWrapper.SchemaEditorClass = DatabaseSchemaEditor
|
<commit_before>"""Patch to enable SQLite Legacy Alter Table support."""
from __future__ import unicode_literals
import sqlite3
import django
from django.db.backends.sqlite3.base import DatabaseWrapper
def needs_patch():
"""Return whether the SQLite backend needs patching.
It will need patching if using Django 1.11 through 2.1.4 while using
SQLite3 v2.26.
Returns:
bool:
``True`` if the backend needs to be patched. ``False`` if it does not.
"""
return (sqlite3.sqlite_version_info > (2, 26, 0) and
(1, 11) <= django.VERSION < (2, 1, 5))
def apply_patch():
"""Apply a patch to the SQLite database backend.
This will turn on SQLite's ``legacy_alter_table`` mode on when modifying
the schema, which is needed in order to successfully allow Django to make
table modifications.
"""
class DatabaseSchemaEditor(DatabaseWrapper.SchemaEditorClass):
def __enter__(self):
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = ON')
return super(DatabaseSchemaEditor, self).__enter__()
def __exit__(self, *args, **kwargs):
super(DatabaseSchemaEditor, self).__exit__(*args, **kwargs)
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = OFF')
DatabaseWrapper.SchemaEditorClass = DatabaseSchemaEditor
<commit_msg>Fix a premature import when patching SQLite compatibility.
We provide a compatibility patch that fixes certain versions of SQLite
with Django prior to 2.1.5.
This patch made the assumption that it could import the Django SQLite
backend at the module level, since SQLite is built into Python. However,
on modern versions of Django, this will fail to import if the version of
SQLite is too old.
We now import this only if we're about to apply the patch, in which case
we've already confirmed the compatible version range.
Testing Done:
Tested on reviews.reviewboard.org, where this problem was first encountered
due to an older SQLite. We no longer hit a premature import.
Reviewed at https://reviews.reviewboard.org/r/12414/<commit_after>"""Patch to enable SQLite Legacy Alter Table support."""
from __future__ import unicode_literals
import sqlite3
import django
def needs_patch():
"""Return whether the SQLite backend needs patching.
It will need patching if using Django 1.11 through 2.1.4 while using
SQLite3 v2.26.
Returns:
bool:
``True`` if the backend needs to be patched. ``False`` if it does not.
"""
return (sqlite3.sqlite_version_info > (2, 26, 0) and
(1, 11) <= django.VERSION < (2, 1, 5))
def apply_patch():
"""Apply a patch to the SQLite database backend.
This will turn on SQLite's ``legacy_alter_table`` mode on when modifying
the schema, which is needed in order to successfully allow Django to make
table modifications.
"""
from django.db.backends.sqlite3.base import DatabaseWrapper
class DatabaseSchemaEditor(DatabaseWrapper.SchemaEditorClass):
def __enter__(self):
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = ON')
return super(DatabaseSchemaEditor, self).__enter__()
def __exit__(self, *args, **kwargs):
super(DatabaseSchemaEditor, self).__exit__(*args, **kwargs)
with self.connection.cursor() as c:
c.execute('PRAGMA legacy_alter_table = OFF')
DatabaseWrapper.SchemaEditorClass = DatabaseSchemaEditor
|
a4b20f88eeafa3667cc47c92736241069f278e9e
|
plata/product/producer/models.py
|
plata/product/producer/models.py
|
from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.modules.options.models import Product
class ProducerManager(models.Manager):
    """Manager adding an ``active`` shortcut for the Producer model."""
    def active(self):
        """Return a queryset of producers whose ``is_active`` flag is set."""
        return self.filter(is_active=True)
class Producer(models.Model):
    """
    Optional producer field for every product
    """
    # Allows hiding a producer without deleting it (see ProducerManager.active).
    is_active = models.BooleanField(_('is active'), default=True)
    name = models.CharField(_('name'), max_length=100)
    # Unique URL fragment identifying this producer.
    slug = models.SlugField(_('slug'), unique=True)
    # Manual sort key; lower values sort first (see Meta.ordering).
    ordering = models.PositiveIntegerField(_('ordering'), default=0)
    description = models.TextField(_('description'), blank=True)
    class Meta:
        app_label = 'product'
        # Sort by the manual key first, then alphabetically by name.
        ordering = ['ordering', 'name']
        verbose_name = _('producer')
        verbose_name_plural = _('producers')
    def __unicode__(self):
        # Display the producer by its name.
        return self.name
# Attach an optional ``producer`` foreign key to the shared Product model so
# every product may reference its producer without subclassing Product.
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
    related_name='products', verbose_name=_('producer')))
|
from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.modules.options.models import Product # FIXME
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
"""
Optional producer field for every product
"""
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
|
Add FIXME in product import in producer module
|
Add FIXME in product import in producer module
|
Python
|
bsd-3-clause
|
armicron/plata,armicron/plata,armicron/plata,stefanklug/plata,allink/plata
|
from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.modules.options.models import Product
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
"""
Optional producer field for every product
"""
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
Add FIXME in product import in producer module
|
from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.modules.options.models import Product # FIXME
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
"""
Optional producer field for every product
"""
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
|
<commit_before>from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.modules.options.models import Product
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
"""
Optional producer field for every product
"""
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
<commit_msg>Add FIXME in product import in producer module<commit_after>
|
from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.modules.options.models import Product # FIXME
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
"""
Optional producer field for every product
"""
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
|
from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.modules.options.models import Product
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
"""
Optional producer field for every product
"""
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
Add FIXME in product import in producer modulefrom datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.modules.options.models import Product # FIXME
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
"""
Optional producer field for every product
"""
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
|
<commit_before>from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.modules.options.models import Product
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
"""
Optional producer field for every product
"""
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
<commit_msg>Add FIXME in product import in producer module<commit_after>from datetime import datetime
from django.db import models
from django.db.models import Sum, signals
from django.utils.translation import ugettext_lazy as _
from plata.product.modules.options.models import Product # FIXME
class ProducerManager(models.Manager):
def active(self):
return self.filter(is_active=True)
class Producer(models.Model):
"""
Optional producer field for every product
"""
is_active = models.BooleanField(_('is active'), default=True)
name = models.CharField(_('name'), max_length=100)
slug = models.SlugField(_('slug'), unique=True)
ordering = models.PositiveIntegerField(_('ordering'), default=0)
description = models.TextField(_('description'), blank=True)
class Meta:
app_label = 'product'
ordering = ['ordering', 'name']
verbose_name = _('producer')
verbose_name_plural = _('producers')
def __unicode__(self):
return self.name
Product.add_to_class('producer', models.ForeignKey(Producer, blank=True, null=True,
related_name='products', verbose_name=_('producer')))
|
81e9e9f17c903b1e762df4e9dfab4edceaaeacd4
|
python_apps/pypo/pypo/timeout.py
|
python_apps/pypo/pypo/timeout.py
|
import threading
from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):
    """Run ``func(*args, **kwargs)`` in a thread, enforcing a timeout.

    If the first attempt times out, Liquidsoap is restarted and the call is
    retried once with double the timeout; a second timeout raises.

    Args:
        func: Callable to execute.
        timeout_duration: Seconds to wait before considering the call hung.
        default: Result returned if ``func`` never assigns one.
        args: Positional arguments for ``func``.
        kwargs: Keyword arguments for ``func``.

    Returns:
        The value returned by ``func``, or ``default`` if the thread was
        joined before a result was produced.

    Raises:
        Exception: If the call hangs again after the Liquidsoap restart.
    """
    class InterruptableThread(threading.Thread):
        def __init__(self):
            threading.Thread.__init__(self)
            self.result = default
        def run(self):
            self.result = func(*args, **kwargs)
    first_attempt = True
    while True:
        it = InterruptableThread()
        it.start()
        if not first_attempt:
            # Give the restarted Liquidsoap more headroom on the retry.
            timeout_duration = timeout_duration * 2
        it.join(timeout_duration)
        # NOTE: Thread.isAlive() was removed in Python 3.9; is_alive() is the
        # portable spelling.
        if it.is_alive():
            # Restart Liquidsoap and try the command one more time. If it
            # fails again then there is something critically wrong...
            if first_attempt:
                # restart liquidsoap
                pypofetch.PypoFetch.ref.restart_liquidsoap()
            else:
                raise Exception("Thread did not terminate")
        else:
            return it.result
        first_attempt = False
def ls_timeout(f, timeout=15, default=None):
    """Wrap ``f`` so calls go through :func:`__timeout` with the given limits."""
    def new_f(*args, **kwargs):
        return __timeout(f, timeout, default, args, kwargs)
    return new_f
|
import threading
from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):
class InterruptableThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = default
def run(self):
self.result = func(*args, **kwargs)
first_attempt = True
while True:
it = InterruptableThread()
it.start()
if not first_attempt:
timeout_duration = timeout_duration * 2
it.join(timeout_duration)
if it.is_alive():
"""Restart Liquidsoap and try the command one more time. If it
fails again then there is something critically wrong..."""
if first_attempt:
# restart liquidsoap
pypofetch.PypoFetch.ref.restart_liquidsoap()
else:
raise Exception("Thread did not terminate")
else:
return it.result
first_attempt = False
def ls_timeout(f, timeout=15, default=None):
def new_f(*args, **kwargs):
return __timeout(f, timeout, default, args, kwargs)
return new_f
|
Rename isAlive method to is_alive
|
Rename isAlive method to is_alive
Python 3.9 compatibility fix
|
Python
|
agpl-3.0
|
LibreTime/libretime,LibreTime/libretime,LibreTime/libretime,LibreTime/libretime,LibreTime/libretime,LibreTime/libretime
|
import threading
from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):
class InterruptableThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = default
def run(self):
self.result = func(*args, **kwargs)
first_attempt = True
while True:
it = InterruptableThread()
it.start()
if not first_attempt:
timeout_duration = timeout_duration * 2
it.join(timeout_duration)
if it.isAlive():
"""Restart Liquidsoap and try the command one more time. If it
fails again then there is something critically wrong..."""
if first_attempt:
# restart liquidsoap
pypofetch.PypoFetch.ref.restart_liquidsoap()
else:
raise Exception("Thread did not terminate")
else:
return it.result
first_attempt = False
def ls_timeout(f, timeout=15, default=None):
def new_f(*args, **kwargs):
return __timeout(f, timeout, default, args, kwargs)
return new_f
Rename isAlive method to is_alive
Python 3.9 compatibility fix
|
import threading
from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):
class InterruptableThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = default
def run(self):
self.result = func(*args, **kwargs)
first_attempt = True
while True:
it = InterruptableThread()
it.start()
if not first_attempt:
timeout_duration = timeout_duration * 2
it.join(timeout_duration)
if it.is_alive():
"""Restart Liquidsoap and try the command one more time. If it
fails again then there is something critically wrong..."""
if first_attempt:
# restart liquidsoap
pypofetch.PypoFetch.ref.restart_liquidsoap()
else:
raise Exception("Thread did not terminate")
else:
return it.result
first_attempt = False
def ls_timeout(f, timeout=15, default=None):
def new_f(*args, **kwargs):
return __timeout(f, timeout, default, args, kwargs)
return new_f
|
<commit_before>import threading
from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):
class InterruptableThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = default
def run(self):
self.result = func(*args, **kwargs)
first_attempt = True
while True:
it = InterruptableThread()
it.start()
if not first_attempt:
timeout_duration = timeout_duration * 2
it.join(timeout_duration)
if it.isAlive():
"""Restart Liquidsoap and try the command one more time. If it
fails again then there is something critically wrong..."""
if first_attempt:
# restart liquidsoap
pypofetch.PypoFetch.ref.restart_liquidsoap()
else:
raise Exception("Thread did not terminate")
else:
return it.result
first_attempt = False
def ls_timeout(f, timeout=15, default=None):
def new_f(*args, **kwargs):
return __timeout(f, timeout, default, args, kwargs)
return new_f
<commit_msg>Rename isAlive method to is_alive
Python 3.9 compatibility fix<commit_after>
|
import threading
from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):
class InterruptableThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = default
def run(self):
self.result = func(*args, **kwargs)
first_attempt = True
while True:
it = InterruptableThread()
it.start()
if not first_attempt:
timeout_duration = timeout_duration * 2
it.join(timeout_duration)
if it.is_alive():
"""Restart Liquidsoap and try the command one more time. If it
fails again then there is something critically wrong..."""
if first_attempt:
# restart liquidsoap
pypofetch.PypoFetch.ref.restart_liquidsoap()
else:
raise Exception("Thread did not terminate")
else:
return it.result
first_attempt = False
def ls_timeout(f, timeout=15, default=None):
def new_f(*args, **kwargs):
return __timeout(f, timeout, default, args, kwargs)
return new_f
|
import threading
from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):
class InterruptableThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = default
def run(self):
self.result = func(*args, **kwargs)
first_attempt = True
while True:
it = InterruptableThread()
it.start()
if not first_attempt:
timeout_duration = timeout_duration * 2
it.join(timeout_duration)
if it.isAlive():
"""Restart Liquidsoap and try the command one more time. If it
fails again then there is something critically wrong..."""
if first_attempt:
# restart liquidsoap
pypofetch.PypoFetch.ref.restart_liquidsoap()
else:
raise Exception("Thread did not terminate")
else:
return it.result
first_attempt = False
def ls_timeout(f, timeout=15, default=None):
def new_f(*args, **kwargs):
return __timeout(f, timeout, default, args, kwargs)
return new_f
Rename isAlive method to is_alive
Python 3.9 compatibility fiximport threading
from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):
class InterruptableThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = default
def run(self):
self.result = func(*args, **kwargs)
first_attempt = True
while True:
it = InterruptableThread()
it.start()
if not first_attempt:
timeout_duration = timeout_duration * 2
it.join(timeout_duration)
if it.is_alive():
"""Restart Liquidsoap and try the command one more time. If it
fails again then there is something critically wrong..."""
if first_attempt:
# restart liquidsoap
pypofetch.PypoFetch.ref.restart_liquidsoap()
else:
raise Exception("Thread did not terminate")
else:
return it.result
first_attempt = False
def ls_timeout(f, timeout=15, default=None):
def new_f(*args, **kwargs):
return __timeout(f, timeout, default, args, kwargs)
return new_f
|
<commit_before>import threading
from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):
class InterruptableThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = default
def run(self):
self.result = func(*args, **kwargs)
first_attempt = True
while True:
it = InterruptableThread()
it.start()
if not first_attempt:
timeout_duration = timeout_duration * 2
it.join(timeout_duration)
if it.isAlive():
"""Restart Liquidsoap and try the command one more time. If it
fails again then there is something critically wrong..."""
if first_attempt:
# restart liquidsoap
pypofetch.PypoFetch.ref.restart_liquidsoap()
else:
raise Exception("Thread did not terminate")
else:
return it.result
first_attempt = False
def ls_timeout(f, timeout=15, default=None):
def new_f(*args, **kwargs):
return __timeout(f, timeout, default, args, kwargs)
return new_f
<commit_msg>Rename isAlive method to is_alive
Python 3.9 compatibility fix<commit_after>import threading
from . import pypofetch
def __timeout(func, timeout_duration, default, args, kwargs):
class InterruptableThread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = default
def run(self):
self.result = func(*args, **kwargs)
first_attempt = True
while True:
it = InterruptableThread()
it.start()
if not first_attempt:
timeout_duration = timeout_duration * 2
it.join(timeout_duration)
if it.is_alive():
"""Restart Liquidsoap and try the command one more time. If it
fails again then there is something critically wrong..."""
if first_attempt:
# restart liquidsoap
pypofetch.PypoFetch.ref.restart_liquidsoap()
else:
raise Exception("Thread did not terminate")
else:
return it.result
first_attempt = False
def ls_timeout(f, timeout=15, default=None):
def new_f(*args, **kwargs):
return __timeout(f, timeout, default, args, kwargs)
return new_f
|
8b125544a49b6ea0c4e54d934f390a96e1efe105
|
pywaw/misc/context_processors.py
|
pywaw/misc/context_processors.py
|
import functools
import subprocess
import django
import platform
from django.conf import settings
from django.contrib.sites.models import get_current_site
def system_info(request):
return {
'system': {
'django': django.get_version(),
'python': platform.python_version(),
'website': get_website_version(),
}
}
def current_site(request):
return {
'site': get_current_site(request),
}
@functools.lru_cache(maxsize=1)
def get_website_version():
command = 'hg tip --template {node}'
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, cwd=settings.PROJECT_ROOT)
return process.communicate()[0]
|
import functools
import subprocess
import django
import platform
from django.conf import settings
from django.contrib.sites.models import get_current_site
def system_info(request):
return {
'system': {
'django': django.get_version(),
'python': platform.python_version(),
'website': get_website_version(),
}
}
def current_site(request):
return {
'site': get_current_site(request),
}
@functools.lru_cache(maxsize=1)
def get_website_version():
command = 'git rev-parse HEAD'
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, cwd=settings.PROJECT_ROOT)
return process.communicate()[0]
|
Change from hg to git.
|
Change from hg to git.
|
Python
|
mit
|
PyWaw/pywaw.org,PyWaw/pywaw.org,PyWaw/pywaw.org
|
import functools
import subprocess
import django
import platform
from django.conf import settings
from django.contrib.sites.models import get_current_site
def system_info(request):
return {
'system': {
'django': django.get_version(),
'python': platform.python_version(),
'website': get_website_version(),
}
}
def current_site(request):
return {
'site': get_current_site(request),
}
@functools.lru_cache(maxsize=1)
def get_website_version():
command = 'hg tip --template {node}'
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, cwd=settings.PROJECT_ROOT)
return process.communicate()[0]
Change from hg to git.
|
import functools
import subprocess
import django
import platform
from django.conf import settings
from django.contrib.sites.models import get_current_site
def system_info(request):
return {
'system': {
'django': django.get_version(),
'python': platform.python_version(),
'website': get_website_version(),
}
}
def current_site(request):
return {
'site': get_current_site(request),
}
@functools.lru_cache(maxsize=1)
def get_website_version():
command = 'git rev-parse HEAD'
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, cwd=settings.PROJECT_ROOT)
return process.communicate()[0]
|
<commit_before>import functools
import subprocess
import django
import platform
from django.conf import settings
from django.contrib.sites.models import get_current_site
def system_info(request):
return {
'system': {
'django': django.get_version(),
'python': platform.python_version(),
'website': get_website_version(),
}
}
def current_site(request):
return {
'site': get_current_site(request),
}
@functools.lru_cache(maxsize=1)
def get_website_version():
command = 'hg tip --template {node}'
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, cwd=settings.PROJECT_ROOT)
return process.communicate()[0]
<commit_msg>Change from hg to git.<commit_after>
|
import functools
import subprocess
import django
import platform
from django.conf import settings
from django.contrib.sites.models import get_current_site
def system_info(request):
return {
'system': {
'django': django.get_version(),
'python': platform.python_version(),
'website': get_website_version(),
}
}
def current_site(request):
return {
'site': get_current_site(request),
}
@functools.lru_cache(maxsize=1)
def get_website_version():
command = 'git rev-parse HEAD'
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, cwd=settings.PROJECT_ROOT)
return process.communicate()[0]
|
import functools
import subprocess
import django
import platform
from django.conf import settings
from django.contrib.sites.models import get_current_site
def system_info(request):
return {
'system': {
'django': django.get_version(),
'python': platform.python_version(),
'website': get_website_version(),
}
}
def current_site(request):
return {
'site': get_current_site(request),
}
@functools.lru_cache(maxsize=1)
def get_website_version():
command = 'hg tip --template {node}'
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, cwd=settings.PROJECT_ROOT)
return process.communicate()[0]
Change from hg to git.import functools
import subprocess
import django
import platform
from django.conf import settings
from django.contrib.sites.models import get_current_site
def system_info(request):
return {
'system': {
'django': django.get_version(),
'python': platform.python_version(),
'website': get_website_version(),
}
}
def current_site(request):
return {
'site': get_current_site(request),
}
@functools.lru_cache(maxsize=1)
def get_website_version():
command = 'git rev-parse HEAD'
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, cwd=settings.PROJECT_ROOT)
return process.communicate()[0]
|
<commit_before>import functools
import subprocess
import django
import platform
from django.conf import settings
from django.contrib.sites.models import get_current_site
def system_info(request):
return {
'system': {
'django': django.get_version(),
'python': platform.python_version(),
'website': get_website_version(),
}
}
def current_site(request):
return {
'site': get_current_site(request),
}
@functools.lru_cache(maxsize=1)
def get_website_version():
command = 'hg tip --template {node}'
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, cwd=settings.PROJECT_ROOT)
return process.communicate()[0]
<commit_msg>Change from hg to git.<commit_after>import functools
import subprocess
import django
import platform
from django.conf import settings
from django.contrib.sites.models import get_current_site
def system_info(request):
return {
'system': {
'django': django.get_version(),
'python': platform.python_version(),
'website': get_website_version(),
}
}
def current_site(request):
return {
'site': get_current_site(request),
}
@functools.lru_cache(maxsize=1)
def get_website_version():
command = 'git rev-parse HEAD'
process = subprocess.Popen(command.split(), stdout=subprocess.PIPE, cwd=settings.PROJECT_ROOT)
return process.communicate()[0]
|
e9df5070abcea31907479630810a64a007ff1f06
|
quotes_page/urls.py
|
quotes_page/urls.py
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^(?P<quote_id>\d*)$', 'quotes_page.core.views.main', name="main"),
url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'quotes_page.core.views.main', name="main"),
url(r'^(?P<quote_id>\d+)/?$', 'quotes_page.core.views.quote', name="quote"),
#url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
Disable init view for now, add extra url for specific quotes page
|
Disable init view for now, add extra url for specific quotes page
|
Python
|
mit
|
kirberich/qicrawler,kirberich/qicrawler
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^(?P<quote_id>\d*)$', 'quotes_page.core.views.main', name="main"),
url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
Disable init view for now, add extra url for specific quotes page
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'quotes_page.core.views.main', name="main"),
url(r'^(?P<quote_id>\d+)/?$', 'quotes_page.core.views.quote', name="quote"),
#url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
<commit_before>from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^(?P<quote_id>\d*)$', 'quotes_page.core.views.main', name="main"),
url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
<commit_msg>Disable init view for now, add extra url for specific quotes page<commit_after>
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'quotes_page.core.views.main', name="main"),
url(r'^(?P<quote_id>\d+)/?$', 'quotes_page.core.views.quote', name="quote"),
#url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^(?P<quote_id>\d*)$', 'quotes_page.core.views.main', name="main"),
url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
Disable init view for now, add extra url for specific quotes pagefrom django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'quotes_page.core.views.main', name="main"),
url(r'^(?P<quote_id>\d+)/?$', 'quotes_page.core.views.quote', name="quote"),
#url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
<commit_before>from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^(?P<quote_id>\d*)$', 'quotes_page.core.views.main', name="main"),
url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
<commit_msg>Disable init view for now, add extra url for specific quotes page<commit_after>from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'qi.views.home', name='home'),
# url(r'^qi/', include('qi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'quotes_page.core.views.main', name="main"),
url(r'^(?P<quote_id>\d+)/?$', 'quotes_page.core.views.quote', name="quote"),
#url(r'^init/?$', 'quotes_page.core.views.init', name="init"),
url(r'^stats/?$', 'quotes_page.core.views.stats', name="stats"),
)
|
fbbf141331c27dfe88d5877cbd1b5bbd54356e0b
|
pidman/pid/migrations/0002_pid_sequence_initial_value.py
|
pidman/pid/migrations/0002_pid_sequence_initial_value.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid, encode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
# pid noids are generated in sequence, so the pid with the
# highest pk _should_ be the one with the highest noid
max_noid = Pid.objects.all().order_by('pk').last().pid
# (previously using aggregate max, but doesn't seem to find
# the highest pid value correctly)
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
Update 1.0 release with latest version of pid sequence migration
|
Update 1.0 release with latest version of pid sequence migration
|
Python
|
apache-2.0
|
emory-libraries/pidman,emory-libraries/pidman
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
Update 1.0 release with latest version of pid sequence migration
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid, encode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
# pid noids are generated in sequence, so the pid with the
# highest pk _should_ be the one with the highest noid
max_noid = Pid.objects.all().order_by('pk').last().pid
# (previously using aggregate max, but doesn't seem to find
# the highest pid value correctly)
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
<commit_msg>Update 1.0 release with latest version of pid sequence migration<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid, encode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
# pid noids are generated in sequence, so the pid with the
# highest pk _should_ be the one with the highest noid
max_noid = Pid.objects.all().order_by('pk').last().pid
# (previously using aggregate max, but doesn't seem to find
# the highest pid value correctly)
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
Update 1.0 release with latest version of pid sequence migration# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid, encode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
# pid noids are generated in sequence, so the pid with the
# highest pk _should_ be the one with the highest noid
max_noid = Pid.objects.all().order_by('pk').last().pid
# (previously using aggregate max, but doesn't seem to find
# the highest pid value correctly)
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
<commit_msg>Update 1.0 release with latest version of pid sequence migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid, encode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
# pid noids are generated in sequence, so the pid with the
# highest pk _should_ be the one with the highest noid
max_noid = Pid.objects.all().order_by('pk').last().pid
# (previously using aggregate max, but doesn't seem to find
# the highest pid value correctly)
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
eede9136f4a605100674cf981ca9e30782d3bf7f
|
temba/msgs/migrations/0038_broadcast_purged.py
|
temba/msgs/migrations/0038_broadcast_purged.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0037_backfill_recipient_counts'),
]
operations = [
migrations.AddField(
model_name='broadcast',
name='purged',
field=models.BooleanField(default=False, help_text='If the messages for this broadcast have been purged'),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0037_backfill_recipient_counts'),
]
operations = [
migrations.AddField(
model_name='broadcast',
name='purged',
field=models.BooleanField(null=True, help_text='If the messages for this broadcast have been purged'),
),
]
|
Tweak migration to apply quickly
|
Tweak migration to apply quickly
|
Python
|
agpl-3.0
|
pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,ewheeler/rapidpro,ewheeler/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web,ewheeler/rapidpro,reyrodrigues/EU-SMS,reyrodrigues/EU-SMS,ewheeler/rapidpro,tsotetsi/textily-web,reyrodrigues/EU-SMS,pulilab/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0037_backfill_recipient_counts'),
]
operations = [
migrations.AddField(
model_name='broadcast',
name='purged',
field=models.BooleanField(default=False, help_text='If the messages for this broadcast have been purged'),
),
]
Tweak migration to apply quickly
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0037_backfill_recipient_counts'),
]
operations = [
migrations.AddField(
model_name='broadcast',
name='purged',
field=models.BooleanField(null=True, help_text='If the messages for this broadcast have been purged'),
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0037_backfill_recipient_counts'),
]
operations = [
migrations.AddField(
model_name='broadcast',
name='purged',
field=models.BooleanField(default=False, help_text='If the messages for this broadcast have been purged'),
),
]
<commit_msg>Tweak migration to apply quickly<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0037_backfill_recipient_counts'),
]
operations = [
migrations.AddField(
model_name='broadcast',
name='purged',
field=models.BooleanField(null=True, help_text='If the messages for this broadcast have been purged'),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0037_backfill_recipient_counts'),
]
operations = [
migrations.AddField(
model_name='broadcast',
name='purged',
field=models.BooleanField(default=False, help_text='If the messages for this broadcast have been purged'),
),
]
Tweak migration to apply quickly# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0037_backfill_recipient_counts'),
]
operations = [
migrations.AddField(
model_name='broadcast',
name='purged',
field=models.BooleanField(null=True, help_text='If the messages for this broadcast have been purged'),
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0037_backfill_recipient_counts'),
]
operations = [
migrations.AddField(
model_name='broadcast',
name='purged',
field=models.BooleanField(default=False, help_text='If the messages for this broadcast have been purged'),
),
]
<commit_msg>Tweak migration to apply quickly<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0037_backfill_recipient_counts'),
]
operations = [
migrations.AddField(
model_name='broadcast',
name='purged',
field=models.BooleanField(null=True, help_text='If the messages for this broadcast have been purged'),
),
]
|
609fcedf1aa90e0022c72121865452b3cbdd0ba3
|
icekit/plugins/content_listing/forms.py
|
icekit/plugins/content_listing/forms.py
|
from fluent_contents.forms import ContentItemForm
#from icekit.content_collections.abstract_models import AbstractCollectedContent
from .models import ContentListingItem
class ContentListingAdminForm(ContentItemForm):
class Meta:
model = ContentListingItem
fields = '__all__'
# def __init__(self, *args, **kwargs):
# super(ContentListingAdminForm, self).__init__(*args, **kwargs)
# # TODO Restrict content types to those for models that are subclasses
# # of `AbstractCollectedContent`?
# valid_ct_ids = []
# cts_qs = self.fields['content_type'].queryset.all()
# for ct in cts_qs:
# model = ct.model_class()
# if model and issubclass(model, AbstractCollectedContent):
# valid_ct_ids.append(ct.id)
# cts_qs = self.fields['content_type'].queryset = \
# cts_qs.filter(pk__in=valid_ct_ids)
|
from django.forms import ModelChoiceField
from django.contrib.contenttypes.models import ContentType
from fluent_contents.forms import ContentItemForm
from .models import ContentListingItem
class ContentTypeModelChoiceField(ModelChoiceField):
def label_from_instance(self, content_type):
return u".".join(content_type.natural_key())
class ContentListingAdminForm(ContentItemForm):
content_type = ContentTypeModelChoiceField(
queryset=ContentType.objects.all()
)
class Meta:
model = ContentListingItem
fields = '__all__'
def __init__(self, *args, **kwargs):
super(ContentListingAdminForm, self).__init__(*args, **kwargs)
# Apply `filter_content_types` filter
self.fields['content_type'].queryset = self.filter_content_types(
self.fields['content_type'].queryset)
def filter_content_types(self, content_type_qs):
"""
Filter the content types selectable for the content listing.
Example to restrict content types to those for models that are
subclasses of `AbstractCollectedContent`:
valid_ct_ids = []
for ct in content_type_qs:
model = ct.model_class()
if model and issubclass(model, AbstractCollectedContent):
valid_ct_ids.append(ct.id)
return content_type_qs.filter(pk__in=valid_ct_ids)
"""
return content_type_qs
|
Improve content listing plugin's admin form
|
Improve content listing plugin's admin form
- show natural key of content types in select field to disambiguate
the SELECT field listing in the admin
- add `filter_content_types` method to form to simplify filtering the
selectable content types in derived plugins.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
from fluent_contents.forms import ContentItemForm
#from icekit.content_collections.abstract_models import AbstractCollectedContent
from .models import ContentListingItem
class ContentListingAdminForm(ContentItemForm):
class Meta:
model = ContentListingItem
fields = '__all__'
# def __init__(self, *args, **kwargs):
# super(ContentListingAdminForm, self).__init__(*args, **kwargs)
# # TODO Restrict content types to those for models that are subclasses
# # of `AbstractCollectedContent`?
# valid_ct_ids = []
# cts_qs = self.fields['content_type'].queryset.all()
# for ct in cts_qs:
# model = ct.model_class()
# if model and issubclass(model, AbstractCollectedContent):
# valid_ct_ids.append(ct.id)
# cts_qs = self.fields['content_type'].queryset = \
# cts_qs.filter(pk__in=valid_ct_ids)
Improve content listing plugin's admin form
- show natural key of content types in select field to disambiguate
the SELECT field listing in the admin
- add `filter_content_types` method to form to simplify filtering the
selectable content types in derived plugins.
|
from django.forms import ModelChoiceField
from django.contrib.contenttypes.models import ContentType
from fluent_contents.forms import ContentItemForm
from .models import ContentListingItem
class ContentTypeModelChoiceField(ModelChoiceField):
def label_from_instance(self, content_type):
return u".".join(content_type.natural_key())
class ContentListingAdminForm(ContentItemForm):
content_type = ContentTypeModelChoiceField(
queryset=ContentType.objects.all()
)
class Meta:
model = ContentListingItem
fields = '__all__'
def __init__(self, *args, **kwargs):
super(ContentListingAdminForm, self).__init__(*args, **kwargs)
# Apply `filter_content_types` filter
self.fields['content_type'].queryset = self.filter_content_types(
self.fields['content_type'].queryset)
def filter_content_types(self, content_type_qs):
"""
Filter the content types selectable for the content listing.
Example to restrict content types to those for models that are
subclasses of `AbstractCollectedContent`:
valid_ct_ids = []
for ct in content_type_qs:
model = ct.model_class()
if model and issubclass(model, AbstractCollectedContent):
valid_ct_ids.append(ct.id)
return content_type_qs.filter(pk__in=valid_ct_ids)
"""
return content_type_qs
|
<commit_before>from fluent_contents.forms import ContentItemForm
#from icekit.content_collections.abstract_models import AbstractCollectedContent
from .models import ContentListingItem
class ContentListingAdminForm(ContentItemForm):
class Meta:
model = ContentListingItem
fields = '__all__'
# def __init__(self, *args, **kwargs):
# super(ContentListingAdminForm, self).__init__(*args, **kwargs)
# # TODO Restrict content types to those for models that are subclasses
# # of `AbstractCollectedContent`?
# valid_ct_ids = []
# cts_qs = self.fields['content_type'].queryset.all()
# for ct in cts_qs:
# model = ct.model_class()
# if model and issubclass(model, AbstractCollectedContent):
# valid_ct_ids.append(ct.id)
# cts_qs = self.fields['content_type'].queryset = \
# cts_qs.filter(pk__in=valid_ct_ids)
<commit_msg>Improve content listing plugin's admin form
- show natural key of content types in select field to disambiguate
the SELECT field listing in the admin
- add `filter_content_types` method to form to simplify filtering the
selectable content types in derived plugins.<commit_after>
|
from django.forms import ModelChoiceField
from django.contrib.contenttypes.models import ContentType
from fluent_contents.forms import ContentItemForm
from .models import ContentListingItem
class ContentTypeModelChoiceField(ModelChoiceField):
def label_from_instance(self, content_type):
return u".".join(content_type.natural_key())
class ContentListingAdminForm(ContentItemForm):
content_type = ContentTypeModelChoiceField(
queryset=ContentType.objects.all()
)
class Meta:
model = ContentListingItem
fields = '__all__'
def __init__(self, *args, **kwargs):
super(ContentListingAdminForm, self).__init__(*args, **kwargs)
# Apply `filter_content_types` filter
self.fields['content_type'].queryset = self.filter_content_types(
self.fields['content_type'].queryset)
def filter_content_types(self, content_type_qs):
"""
Filter the content types selectable for the content listing.
Example to restrict content types to those for models that are
subclasses of `AbstractCollectedContent`:
valid_ct_ids = []
for ct in content_type_qs:
model = ct.model_class()
if model and issubclass(model, AbstractCollectedContent):
valid_ct_ids.append(ct.id)
return content_type_qs.filter(pk__in=valid_ct_ids)
"""
return content_type_qs
|
from fluent_contents.forms import ContentItemForm
#from icekit.content_collections.abstract_models import AbstractCollectedContent
from .models import ContentListingItem
class ContentListingAdminForm(ContentItemForm):
class Meta:
model = ContentListingItem
fields = '__all__'
# def __init__(self, *args, **kwargs):
# super(ContentListingAdminForm, self).__init__(*args, **kwargs)
# # TODO Restrict content types to those for models that are subclasses
# # of `AbstractCollectedContent`?
# valid_ct_ids = []
# cts_qs = self.fields['content_type'].queryset.all()
# for ct in cts_qs:
# model = ct.model_class()
# if model and issubclass(model, AbstractCollectedContent):
# valid_ct_ids.append(ct.id)
# cts_qs = self.fields['content_type'].queryset = \
# cts_qs.filter(pk__in=valid_ct_ids)
Improve content listing plugin's admin form
- show natural key of content types in select field to disambiguate
the SELECT field listing in the admin
- add `filter_content_types` method to form to simplify filtering the
selectable content types in derived plugins.from django.forms import ModelChoiceField
from django.contrib.contenttypes.models import ContentType
from fluent_contents.forms import ContentItemForm
from .models import ContentListingItem
class ContentTypeModelChoiceField(ModelChoiceField):
def label_from_instance(self, content_type):
return u".".join(content_type.natural_key())
class ContentListingAdminForm(ContentItemForm):
content_type = ContentTypeModelChoiceField(
queryset=ContentType.objects.all()
)
class Meta:
model = ContentListingItem
fields = '__all__'
def __init__(self, *args, **kwargs):
super(ContentListingAdminForm, self).__init__(*args, **kwargs)
# Apply `filter_content_types` filter
self.fields['content_type'].queryset = self.filter_content_types(
self.fields['content_type'].queryset)
def filter_content_types(self, content_type_qs):
"""
Filter the content types selectable for the content listing.
Example to restrict content types to those for models that are
subclasses of `AbstractCollectedContent`:
valid_ct_ids = []
for ct in content_type_qs:
model = ct.model_class()
if model and issubclass(model, AbstractCollectedContent):
valid_ct_ids.append(ct.id)
return content_type_qs.filter(pk__in=valid_ct_ids)
"""
return content_type_qs
|
<commit_before>from fluent_contents.forms import ContentItemForm
#from icekit.content_collections.abstract_models import AbstractCollectedContent
from .models import ContentListingItem
class ContentListingAdminForm(ContentItemForm):
class Meta:
model = ContentListingItem
fields = '__all__'
# def __init__(self, *args, **kwargs):
# super(ContentListingAdminForm, self).__init__(*args, **kwargs)
# # TODO Restrict content types to those for models that are subclasses
# # of `AbstractCollectedContent`?
# valid_ct_ids = []
# cts_qs = self.fields['content_type'].queryset.all()
# for ct in cts_qs:
# model = ct.model_class()
# if model and issubclass(model, AbstractCollectedContent):
# valid_ct_ids.append(ct.id)
# cts_qs = self.fields['content_type'].queryset = \
# cts_qs.filter(pk__in=valid_ct_ids)
<commit_msg>Improve content listing plugin's admin form
- show natural key of content types in select field to disambiguate
the SELECT field listing in the admin
- add `filter_content_types` method to form to simplify filtering the
selectable content types in derived plugins.<commit_after>from django.forms import ModelChoiceField
from django.contrib.contenttypes.models import ContentType
from fluent_contents.forms import ContentItemForm
from .models import ContentListingItem
class ContentTypeModelChoiceField(ModelChoiceField):
def label_from_instance(self, content_type):
return u".".join(content_type.natural_key())
class ContentListingAdminForm(ContentItemForm):
content_type = ContentTypeModelChoiceField(
queryset=ContentType.objects.all()
)
class Meta:
model = ContentListingItem
fields = '__all__'
def __init__(self, *args, **kwargs):
super(ContentListingAdminForm, self).__init__(*args, **kwargs)
# Apply `filter_content_types` filter
self.fields['content_type'].queryset = self.filter_content_types(
self.fields['content_type'].queryset)
def filter_content_types(self, content_type_qs):
"""
Filter the content types selectable for the content listing.
Example to restrict content types to those for models that are
subclasses of `AbstractCollectedContent`:
valid_ct_ids = []
for ct in content_type_qs:
model = ct.model_class()
if model and issubclass(model, AbstractCollectedContent):
valid_ct_ids.append(ct.id)
return content_type_qs.filter(pk__in=valid_ct_ids)
"""
return content_type_qs
|
b1bfd9630ef049070b0cd6ae215470d3d1facd40
|
django/contrib/messages/views.py
|
django/contrib/messages/views.py
|
from django.views.generic.edit import FormMixin
from django.contrib import messages
class SuccessMessageMixin(FormMixin):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
|
from django.contrib import messages
class SuccessMessageMixin(object):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
|
Remove unnecessary and problematic parent class from SuccessMessageMixin
|
Remove unnecessary and problematic parent class from SuccessMessageMixin
refs #16319, thanks to bmispelon for the catch
|
Python
|
bsd-3-clause
|
xadahiya/django,anant-dev/django,AltSchool/django,huang4fstudio/django,ryanahall/django,gunchleoc/django,andreif/django,vmarkovtsev/django,avneesh91/django,makinacorpus/django,ArnossArnossi/django,mitchelljkotler/django,avanov/django,sdcooke/django,denis-pitul/django,chyeh727/django,jhoos/django,ataylor32/django,piquadrat/django,SoftwareMaven/django,mttr/django,joakim-hove/django,akshatharaj/django,devops2014/djangosite,jejimenez/django,katrid/django,tuhangdi/django,archen/django,oscaro/django,blueyed/django,MarcJoan/django,GhostThrone/django,megaumi/django,jhoos/django,edmorley/django,theo-l/django,jscn/django,denys-duchier/django,frdb194/django,hcsturix74/django,koordinates/django,tayfun/django,makinacorpus/django,arun6582/django,frishberg/django,Korkki/django,avanov/django,weiawe/django,marcelocure/django,reinout/django,davidharrigan/django,hackerbot/DjangoDev,darkryder/django,sjlehtin/django,hackerbot/DjangoDev,epandurski/django,jgoclawski/django,wkschwartz/django,AltSchool/django,mcardillo55/django,hybrideagle/django,ticosax/django,django/django,ataylor32/django,maxsocl/django,andyzsf/django,helenst/django,camilonova/django,seanwestfall/django,quxiaolong1504/django,henryfjordan/django,dpetzold/django,delhivery/django,jyotsna1820/django,tragiclifestories/django,eyohansa/django,takis/django,kholidfu/django,robhudson/django,arun6582/django,alimony/django,alrifqi/django,ironbox360/django,epandurski/django,syphar/django,krishna-pandey-git/django,darjeeling/django,Argon-Zhou/django,yceruto/django,PolicyStat/django,curtisstpierre/django,gdub/django,jvkops/django,erikr/django,szopu/django,KokareIITP/django,elky/django,dpetzold/django,caotianwei/django,dwightgunning/django,ticosax/django,MarkusH/django,seanwestfall/django,baylee/django,RevelSystems/django,AndrewGrossman/django,Yong-Lee/django,chyeh727/django,adelton/django,rapilabs/django,syaiful6/django,mcrowson/django,sam-tsai/django,ajoaoff/django,ericfc/django,eugena/django,shownomercy/django,tcwicklund/django,elija
h513/django,GaussDing/django,leekchan/django_test,MatthewWilkes/django,andela-ooladayo/django,piquadrat/django,savoirfairelinux/django,oberlin/django,phalt/django,barbuza/django,ironbox360/django,extremewaysback/django,DrMeers/django,SujaySKumar/django,ataylor32/django,deployed/django,Y3K/django,wweiradio/django,Matt-Deacalion/django,ecederstrand/django,savoirfairelinux/django,darkryder/django,WSDC-NITWarangal/django,whs/django,vincepandolfo/django,tuhangdi/django,petecummings/django,andela-ooladayo/django,ifduyue/django,mrbox/django,ptoraskar/django,dgladkov/django,dudepare/django,carljm/django,Nepherhotep/django,willharris/django,anant-dev/django,mcella/django,bobcyw/django,raphaelmerx/django,poiati/django,gunchleoc/django,quamilek/django,willhardy/django,megaumi/django,nju520/django,blighj/django,taaviteska/django,jaywreddy/django,jasonbot/django,xwolf12/django,claudep/django,hybrideagle/django,ar45/django,elky/django,varunnaganathan/django,benjaminjkraft/django,etos/django,pipermerriam/django,fafaman/django,simonw/django,himleyb85/django,avanov/django,dgladkov/django,kevintaw/django,IRI-Research/django,hassanabidpk/django,blighj/django,hunter007/django,takeshineshiro/django,treyhunner/django,chyeh727/django,felixjimenez/django,ar45/django,timgraham/django,payeldillip/django,jdelight/django,davidharrigan/django,django-nonrel/django,Adnn/django,SujaySKumar/django,YangSongzhou/django,baylee/django,leeon/annotated-django,ulope/django,jpic/django,PetrDlouhy/django,matiasb/django,zhoulingjun/django,nealtodd/django,techdragon/django,lsqtongxin/django,charettes/django,ebar0n/django,ziima/django,rsalmaso/django,beni55/django,nealtodd/django,ghedsouza/django,mmardini/django,Y3K/django,techdragon/django,kcpawan/django,mitchelljkotler/django,TimBuckley/effective_django,sopier/django,bikong2/django,zhoulingjun/django,dex4er/django,dpetzold/django,mitya57/django,hottwaj/django,ericholscher/django,django/django,feroda/django,krishna-pandey-git/django,kisna72/django,dfunckt/dja
ngo,vitaly4uk/django,huang4fstudio/django,liavkoren/djangoDev,adelton/django,syaiful6/django,peterlauri/django,AndrewGrossman/django,MoritzS/django,takeshineshiro/django,wetneb/django,AndrewGrossman/django,dbaxa/django,mattrobenolt/django,bak1an/django,tomchristie/django,rhertzog/django,ojake/django,mshafiq9/django,gannetson/django,poiati/django,jasonwzhy/django,evansd/django,sergei-maertens/django,jeezybrick/django,wsmith323/django,eyohansa/django,waytai/django,xwolf12/django,rsalmaso/django,andresgz/django,irwinlove/django,akaariai/django,adamchainz/django,dudepare/django,auvipy/django,HonzaKral/django,djbaldey/django,aspidites/django,kamyu104/django,ojengwa/django-1,DONIKAN/django,spisneha25/django,taaviteska/django,evansd/django,waytai/django,ABaldwinHunter/django-clone-classic,gengue/django,lsqtongxin/django,fpy171/django,mewtaylor/django,Anonymous-X6/django,maxsocl/django,maxsocl/django,ticosax/django,ryangallen/django,jvkops/django,MoritzS/django,KokareIITP/django,shtouff/django,georgemarshall/django,dfdx2/django,krisys/django,KokareIITP/django,rhertzog/django,salamer/django,BrotherPhil/django,tomchristie/django,BMJHayward/django,tuhangdi/django,Beauhurst/django,waytai/django,z0by/django,lunafeng/django,xrmx/django,asser/django,sdcooke/django,myang321/django,gcd0318/django,zsiciarz/django,peterlauri/django,asser/django,yewang15215/django,haxoza/django,sadaf2605/django,EmadMokhtar/Django,jyotsna1820/django,unaizalakain/django,benjaminjkraft/django,blindroot/django,stevenewey/django,filias/django,EliotBerriot/django,NullSoldier/django,DrMeers/django,runekaagaard/django-contrib-locking,elena/django,ataylor32/django,rrrene/django,stevenewey/django,Matt-Deacalion/django,frePPLe/django,hnakamur/django,nemesisdesign/django,ytjiang/django,alilotfi/django,doismellburning/django,frdb194/django,jyotsna1820/django,whs/django,marqueedev/django,edmorley/django,mattseymour/django,seanwestfall/django,Balachan27/django,atul-bhouraskar/django,yigitguler/django,peterlauri/djang
o,hkchenhongyi/django,djbaldey/django,solarissmoke/django,ajoaoff/django,Matt-Deacalion/django,SebasSBM/django,abomyi/django,kholidfu/django,craynot/django,hynekcer/django,twz915/django,sephii/django,marissazhou/django,camilonova/django,ivandevp/django,alimony/django,RossBrunton/django,denis-pitul/django,areski/django,dsanders11/django,dydek/django,tragiclifestories/django,djbaldey/django,aisipos/django,camilonova/django,vitaly4uk/django,programadorjc/django,rapilabs/django,googleinterns/django,ghickman/django,tcwicklund/django,pquentin/django,zsiciarz/django,rajsadho/django,WSDC-NITWarangal/django,jn7163/django,phalt/django,DONIKAN/django,elkingtonmcb/django,nealtodd/django,mitchelljkotler/django,jhg/django,tanmaythakur/django,TridevGuha/django,jasonwzhy/django,zanderle/django,nhippenmeyer/django,divio/django,jasonwzhy/django,tanmaythakur/django,mathspace/django,marckuz/django,MounirMesselmeni/django,apocquet/django,mattseymour/django,caotianwei/django,kholidfu/django,quxiaolong1504/django,dydek/django,mshafiq9/django,AlexHill/django,ojengwa/django-1,mattrobenolt/django,felixxm/django,Nepherhotep/django,spisneha25/django,kamyu104/django,krisys/django,BMJHayward/django,Leila20/django,areski/django,vitan/django,ghickman/django,shaib/django,edevil/django,evansd/django,Sonicbids/django,evansd/django,fpy171/django,gitaarik/django,ojake/django,sarthakmeh03/django,myang321/django,haxoza/django,shtouff/django,liuliwork/django,AltSchool/django,BrotherPhil/django,gdi2290/django,blueyed/django,robhudson/django,jallohm/django,yewang15215/django,risicle/django,MarcJoan/django,mrfuxi/django,zedr/django,twz915/django,oberlin/django,GitAngel/django,Anonymous-X6/django,druuu/django,mmardini/django,Beauhurst/django,jsoref/django,rhertzog/django,gengue/django,webgeodatavore/django,poiati/django,marissazhou/django,Beauhurst/django,mrbox/django,GaussDing/django,gcd0318/django,jgeskens/django,mcrowson/django,rynomster/django,devops2014/djangosite,sopier/django,jylaxp/django,aroche/djang
o,kevintaw/django,postrational/django,dbaxa/django,MatthewWilkes/django,TimYi/django,sgzsh269/django,SebasSBM/django,beck/django,ifduyue/django,MatthewWilkes/django,archen/django,synasius/django,bikong2/django,arun6582/django,syphar/django,kamyu104/django,cainmatt/django,elky/django,ghedsouza/django,liuliwork/django,kaedroho/django,EliotBerriot/django,stewartpark/django,Vixionar/django,rogerhu/django,dursk/django,alexallah/django,labcodes/django,helenst/django,hottwaj/django,denys-duchier/django,yamila-moreno/django,TridevGuha/django,reinout/django,jdelight/django,gengue/django,ckirby/django,ivandevp/django,Vixionar/django,YYWen0o0/python-frame-django,YYWen0o0/python-frame-django,mjtamlyn/django,andela-ooladayo/django,gcd0318/django,sjlehtin/django,guettli/django,marqueedev/django,jpic/django,aidanlister/django,feroda/django,blindroot/django,HonzaKral/django,riteshshrv/django,davidharrigan/django,salamer/django,aisipos/django,alexmorozov/django,kangfend/django,sam-tsai/django,hassanabidpk/django,willhardy/django,ericfc/django,rmboggs/django,dwightgunning/django,Mixser/django,nju520/django,vitaly4uk/django,ytjiang/django,IRI-Research/django,aroche/django,crazy-canux/django,beni55/django,indevgr/django,tanmaythakur/django,savoirfairelinux/django,delinhabit/django,alrifqi/django,piquadrat/django,mcrowson/django,koniiiik/django,jscn/django,jeezybrick/django,lmorchard/django,jhg/django,mcardillo55/django,crazy-canux/django,xrmx/django,Matt-Deacalion/django,ryanahall/django,roselleebarle04/django,joequery/django,mttr/django,ojengwa/django-1,unaizalakain/django,delhivery/django,jarshwah/django,riteshshrv/django,filias/django,ziima/django,JorgeCoock/django,mbox/django,coldmind/django,rapilabs/django,zedr/django,frePPLe/django,rajsadho/django,django-nonrel/django,x111ong/django,daniponi/django,timgraham/django,ryanahall/django,akshatharaj/django,frdb194/django,programadorjc/django,fenginx/django,gdub/django,aroche/django,rlugojr/django,kutenai/django,olasitarska/django,sypha
r/django,marctc/django,googleinterns/django,wweiradio/django,rajsadho/django,mathspace/django,sarthakmeh03/django,taaviteska/django,vincepandolfo/django,marckuz/django,curtisstpierre/django,saydulk/django,takeshineshiro/django,oberlin/django,hobarrera/django,aspidites/django,SoftwareMaven/django,tayfun/django,hkchenhongyi/django,atul-bhouraskar/django,alrifqi/django,digimarc/django,ghedsouza/django,nielsvanoch/django,Yong-Lee/django,jpic/django,hunter007/django,techdragon/django,joakim-hove/django,hcsturix74/django,seocam/django,BMJHayward/django,hkchenhongyi/django,erikr/django,wsmith323/django,ebar0n/django,nemesisdesign/django,simonw/django,lmorchard/django,pquentin/django,twz915/django,sadaf2605/django,ABaldwinHunter/django-clone-classic,dhruvagarwal/django,edmorley/django,ecederstrand/django,zhaodelong/django,PolicyStat/django,frankvdp/django,akshatharaj/django,ulope/django,felixxm/django,adamchainz/django,doismellburning/django,eyohansa/django,fpy171/django,lunafeng/django,zhaodelong/django,henryfjordan/django,z0by/django,seocam/django,haxoza/django,yewang15215/django,mttr/django,tysonclugg/django,sgzsh269/django,quamilek/django,jn7163/django,dursk/django,mitya57/django,darjeeling/django,gitaarik/django,kholidfu/django,aerophile/django,mjtamlyn/django,Beauhurst/django,marckuz/django,drjeep/django,bikong2/django,rwillmer/django,risicle/django,digimarc/django,alilotfi/django,stewartpark/django,AlexHill/django,bobcyw/django,jgoclawski/django,denisenkom/django,hkchenhongyi/django,RevelSystems/django,blueyed/django,redhat-openstack/django,tbeadle/django,koordinates/django,dfdx2/django,codepantry/django,solarissmoke/django,frankvdp/django,EmadMokhtar/Django,mewtaylor/django,tomchristie/django,donkirkby/django,gitaarik/django,jejimenez/django,seocam/django,karyon/django,ckirby/django,beckastar/django,frePPLe/django,jnovinger/django,marcelocure/django,MarcJoan/django,b-me/django,Korkki/django,ebar0n/django,vmarkovtsev/django,deployed/django,matiasb/django,manhhomienbi
enthuy/django,tysonclugg/django,areski/django,GitAngel/django,guettli/django,andela-ooladayo/django,MikeAmy/django,extremewaysback/django,jmcarp/django,Korkki/django,ticosax/django,caotianwei/django,gcd0318/django,ajoaoff/django,yamila-moreno/django,vincepandolfo/django,sam-tsai/django,JavML/django,shacker/django,wsmith323/django,avneesh91/django,gohin/django,WillGuan105/django,MarkusH/django,pasqualguerrero/django,gannetson/django,jscn/django,etos/django,andreif/django,sarvex/django,shtouff/django,reinout/django,mattseymour/django,drjeep/django,andreif/django,frankvdp/django,dhruvagarwal/django,jarshwah/django,redhat-openstack/django,GhostThrone/django,redhat-openstack/django,mrfuxi/django,jeezybrick/django,mojeto/django,mshafiq9/django,double-y/django,ericfc/django,kisna72/django,mattrobenolt/django,yamila-moreno/django,rockneurotiko/django,koniiiik/django,jsoref/django,yceruto/django,MikeAmy/django,yakky/django,neiudemo1/django,schinckel/django,epandurski/django,bak1an/django,kswiat/django,frankvdp/django,takeshineshiro/django,NullSoldier/django,DasIch/django,DrMeers/django,craynot/django,marctc/django,rogerhu/django,barbuza/django,raphaelmerx/django,zanderle/django,elijah513/django,mitya57/django,zerc/django,himleyb85/django,memtoko/django,apocquet/django,zhaodelong/django,irwinlove/django,duqiao/django,scorphus/django,akshatharaj/django,BlindHunter/django,mattseymour/django,ifduyue/django,GaussDing/django,donkirkby/django,claudep/django,runekaagaard/django-contrib-locking,jejimenez/django,payeldillip/django,loic/django,mjtamlyn/django,nielsvanoch/django,h4r5h1t/django-hauthy,indevgr/django,robhudson/django,simone/django-gb,marissazhou/django,sarvex/django,ptoraskar/django,nealtodd/django,hackerbot/DjangoDev,treyhunner/django,elkingtonmcb/django,runekaagaard/django-contrib-locking,auready/django,Endika/django,leeon/annotated-django,Leila20/django,wsmith323/django,whs/django,SoftwareMaven/django,yigitguler/django,gitaarik/django,akaariai/django,myang321/django,ti
mgraham/django,BlindHunter/django,hnakamur/django,irwinlove/django,1013553207/django,petecummings/django,hobarrera/django,PolicyStat/django,auvipy/django,feroda/django,kangfend/django,ivandevp/django,eugena/django,ecederstrand/django,koniiiik/django,tragiclifestories/django,elijah513/django,zerc/django,seocam/django,theo-l/django,elena/django,sbellem/django,guettli/django,atul-bhouraskar/django,jgeskens/django,AltSchool/django,leeon/annotated-django,bitcity/django,fafaman/django,bak1an/django,aerophile/django,treyhunner/django,beni55/django,kcpawan/django,marckuz/django,auvipy/django,myang321/django,dydek/django,jyotsna1820/django,pauloxnet/django,JavML/django,bitcity/django,aspidites/django,GitAngel/django,apollo13/django,ytjiang/django,x111ong/django,alimony/django,shtouff/django,sjlehtin/django,rockneurotiko/django,cainmatt/django,solarissmoke/django,denis-pitul/django,liavkoren/djangoDev,rrrene/django,techdragon/django,uranusjr/django,ptoraskar/django,liu602348184/django,xwolf12/django,zulip/django,TimBuckley/effective_django,blindroot/django,jylaxp/django,jaywreddy/django,sdcooke/django,ajaali/django,rhertzog/django,rynomster/django,h4r5h1t/django-hauthy,moreati/django,barbuza/django,himleyb85/django,ghickman/django,monetate/django,bikong2/django,bspink/django,rizumu/django,mcrowson/django,kangfend/django,varunnaganathan/django,hynekcer/django,kevintaw/django,HonzaKral/django,rtindru/django,codepantry/django,raphaelmerx/django,irwinlove/django,spisneha25/django,SujaySKumar/django,dhruvagarwal/django,beni55/django,DasIch/django,YangSongzhou/django,loic/django,marqueedev/django,reinout/django,MoritzS/django,sarthakmeh03/django,rsvip/Django,ironbox360/django,mojeto/django,maxsocl/django,akintoey/django,gannetson/django,mjtamlyn/django,jpic/django,sopier/django,github-account-because-they-want-it/django,WillGuan105/django,DasIch/django,blighj/django,andyzsf/django,gdub/django,rrrene/django,salamer/django,sarthakmeh03/django,duqiao/django,roselleebarle04/django,geor
gemarshall/django,kswiat/django,dbaxa/django,yask123/django,jarshwah/django,kutenai/django,sergei-maertens/django,abomyi/django,jmcarp/django,lmorchard/django,MarkusH/django,charettes/django,vitan/django,TridevGuha/django,akintoey/django,nju520/django,mdj2/django,xadahiya/django,lunafeng/django,himleyb85/django,monetate/django,loic/django,MikeAmy/django,jmcarp/django,nhippenmeyer/django,katrid/django,dpetzold/django,eugena/django,yograterol/django,mitchelljkotler/django,aidanlister/django,Argon-Zhou/django,mojeto/django,varunnaganathan/django,webgeodatavore/django,ulope/django,delhivery/django,mlavin/django,JavML/django,sergei-maertens/django,dhruvagarwal/django,ghickman/django,auvipy/django,sadaf2605/django,asser/django,seanwestfall/django,b-me/django,github-account-because-they-want-it/django,megaumi/django,github-account-because-they-want-it/django,frishberg/django,rsvip/Django,monetate/django,mcardillo55/django,leekchan/django_test,coldmind/django,WillGuan105/django,rockneurotiko/django,pauloxnet/django,raphaelmerx/django,gchp/django,mewtaylor/django,fenginx/django,shownomercy/django,wetneb/django,MikeAmy/django,z0by/django,synasius/django,TridevGuha/django,knifenomad/django,zerc/django,saydulk/django,felixxm/django,denisenkom/django,peterlauri/django,rlugojr/django,fafaman/django,googleinterns/django,extremewaysback/django,whs/django,gchp/django,pipermerriam/django,fpy171/django,rizumu/django,spisneha25/django,blueyed/django,gchp/django,jenalgit/django,apollo13/django,saydulk/django,SebasSBM/django,denis-pitul/django,sephii/django,felixjimenez/django,Mixser/django,ebar0n/django,stewartpark/django,litchfield/django,ajaali/django,schinckel/django,mbox/django,donkirkby/django,hottwaj/django,rlugojr/django,craynot/django,risicle/django,akintoey/django,iambibhas/django,shaistaansari/django,rtindru/django,hybrideagle/django,phalt/django,dursk/django,freakboy3742/django,gdi2290/django,uranusjr/django,beckastar/django,oscaro/django,andreif/django,tbeadle/django,MarcJoa
n/django,roselleebarle04/django,mshafiq9/django,willharris/django,adambrenecki/django,gunchleoc/django,koordinates/django,gdub/django,dsanders11/django,aerophile/django,huang4fstudio/django,vmarkovtsev/django,andela-ifageyinbo/django,joequery/django,Mixser/django,benjaminjkraft/django,hynekcer/django,kangfend/django,mojeto/django,andresgz/django,camilonova/django,elky/django,schinckel/django,adamchainz/django,BlindHunter/django,pipermerriam/django,Anonymous-X6/django,iambibhas/django,ckirby/django,davidharrigan/django,ABaldwinHunter/django-clone-classic,manhhomienbienthuy/django,IRI-Research/django,dracos/django,knifenomad/django,jhg/django,zsiciarz/django,phalt/django,carljm/django,quxiaolong1504/django,akaariai/django,mdj2/django,liu602348184/django,Adnn/django,ajaali/django,oinopion/django,apollo13/django,yamila-moreno/django,adelton/django,jhoos/django,krisys/django,bobcyw/django,filias/django,moreati/django,yakky/django,pauloxnet/django,PetrDlouhy/django,jnovinger/django,henryfjordan/django,abomyi/django,stevenewey/django,djbaldey/django,delhivery/django,drjeep/django,crazy-canux/django,quamilek/django,ryangallen/django,double-y/django,rsvip/Django,gohin/django,hynekcer/django,apocquet/django,mmardini/django,anant-dev/django,ar45/django,t0in4/django,adamchainz/django,sarvex/django,MounirMesselmeni/django,doismellburning/django,dgladkov/django,rizumu/django,marcelocure/django,alexallah/django,stewartpark/django,mcella/django,marcelocure/django,etos/django,anant-dev/django,bak1an/django,marissazhou/django,dex4er/django,zhoulingjun/django,yewang15215/django,shacker/django,alexallah/django,jaywreddy/django,HousekeepLtd/django,andela-ifageyinbo/django,Mixser/django,ar45/django,zanderle/django,denys-duchier/django,kisna72/django,kswiat/django,Anonymous-X6/django,tysonclugg/django,savoirfairelinux/django,GitAngel/django,erikr/django,ABaldwinHunter/django-clone,epandurski/django,hottwaj/django,jasonbot/django,labcodes/django,Leila20/django,mrfuxi/django,karyon/django,t
heo-l/django,Beeblio/django,sergei-maertens/django,matiasb/django,jrrembert/django,fenginx/django,frishberg/django,mttr/django,dracos/django,takis/django,hcsturix74/django,harisibrahimkv/django,kamyu104/django,weiawe/django,lwiecek/django,hobarrera/django,extremewaysback/django,jdelight/django,supriyantomaftuh/django,edmorley/django,bobcyw/django,divio/django,MounirMesselmeni/django,YYWen0o0/python-frame-django,Nepherhotep/django,xrmx/django,gunchleoc/django,marctc/django,andela-ifageyinbo/django,dbaxa/django,jasonbot/django,jenalgit/django,hybrideagle/django,RevelSystems/django,HousekeepLtd/django,poiati/django,yograterol/django,oinopion/django,ptoraskar/django,sadaf2605/django,rajsadho/django,fafaman/django,gchp/django,ericholscher/django,knifenomad/django,JorgeCoock/django,xadahiya/django,errx/django,Beeblio/django,arun6582/django,jnovinger/django,wkschwartz/django,divio/django,katrid/django,wkschwartz/django,denisenkom/django,alexmorozov/django,alexallah/django,ajaali/django,akaariai/django,tysonclugg/django,auready/django,neiudemo1/django,frdb194/django,DONIKAN/django,dudepare/django,mlavin/django,x111ong/django,PetrDlouhy/django,mcella/django,ArnossArnossi/django,github-account-because-they-want-it/django,ckirby/django,ASCrookes/django,supriyantomaftuh/django,rynomster/django,nemesisdesign/django,Endika/django,oinopion/django,asser/django,MoritzS/django,codepantry/django,felixxm/django,RevelSystems/django,davgibbs/django,avneesh91/django,adelton/django,darkryder/django,ziima/django,tragiclifestories/django,tbeadle/django,beckastar/django,karyon/django,jallohm/django,jvkops/django,willhardy/django,ArnossArnossi/django,shaib/django,solarissmoke/django,caotianwei/django,huang4fstudio/django,tanmaythakur/django,loic/django,salamer/django,chyeh727/django,sephii/django,unaizalakain/django,redhat-openstack/django,mattrobenolt/django,beck/django,erikr/django,liuliwork/django,Korkki/django,nielsvanoch/django,syaiful6/django,roselleebarle04/django,guettli/django,neiudem
o1/django,joakim-hove/django,fenginx/django,matiasb/django,googleinterns/django,riteshshrv/django,petecummings/django,dudepare/django,claudep/django,aisipos/django,riteshshrv/django,t0in4/django,zhaodelong/django,rtindru/django,krishna-pandey-git/django,kevintaw/django,zhoulingjun/django,liuliwork/django,claudep/django,megaumi/django,vitaly4uk/django,ArnossArnossi/django,RossBrunton/django,mcardillo55/django,Vixionar/django,labcodes/django,rynomster/django,darkryder/django,uranusjr/django,auready/django,lsqtongxin/django,hunter007/django,willharris/django,drjeep/django,mcella/django,t0in4/django,pasqualguerrero/django,double-y/django,alexmorozov/django,postrational/django,dfdx2/django,nhippenmeyer/django,szopu/django,1013553207/django,denys-duchier/django,scorphus/django,andela-ifageyinbo/django,vitan/django,dracos/django,rsalmaso/django,RossBrunton/django,BlindHunter/django,bitcity/django,alilotfi/django,hackerbot/DjangoDev,xrmx/django,felixjimenez/django,jsoref/django,Sonicbids/django,ojake/django,Yong-Lee/django,ASCrookes/django,henryfjordan/django,aidanlister/django,programadorjc/django,hcsturix74/django,shaistaansari/django,shaib/django,supriyantomaftuh/django,neiudemo1/django,knifenomad/django,hassanabidpk/django,tomchristie/django,lsqtongxin/django,sbellem/django,yigitguler/django,beck/django,payeldillip/django,jn7163/django,errx/django,bspink/django,weiawe/django,joequery/django,Balachan27/django,craynot/django,MarkusH/django,ironbox360/django,shownomercy/django,jylaxp/django,vitan/django,mewtaylor/django,digimarc/django,delinhabit/django,jnovinger/django,dwightgunning/django,ryangallen/django,KokareIITP/django,takis/django,jasonbot/django,Argon-Zhou/django,freakboy3742/django,bspink/django,sarvex/django,harisibrahimkv/django,bitcity/django,dfunckt/django,Sonicbids/django,rtindru/django,django/django,SoftwareMaven/django,manhhomienbienthuy/django,WSDC-NITWarangal/django,tbeadle/django,shaistaansari/django,charettes/django,WillGuan105/django,georgemarshall/dj
ango,yask123/django,daniponi/django,lwiecek/django,SebasSBM/django,nhippenmeyer/django,MatthewWilkes/django,moreati/django,szopu/django,shacker/django,liu602348184/django,NullSoldier/django,jgoclawski/django,baylee/django,daniponi/django,sgzsh269/django,SujaySKumar/django,digimarc/django,manhhomienbienthuy/django,leekchan/django_test,saydulk/django,TimYi/django,cainmatt/django,andresgz/django,jejimenez/django,ASCrookes/django,zulip/django,mlavin/django,TimBuckley/effective_django,wetneb/django,takis/django,Vixionar/django,robhudson/django,elena/django,beckastar/django,davgibbs/django,adambrenecki/django,charettes/django,shaistaansari/django,jenalgit/django,helenst/django,ericholscher/django,alexmorozov/django,jgoclawski/django,jasonwzhy/django,frishberg/django,edevil/django,duqiao/django,Adnn/django,hassanabidpk/django,felixjimenez/django,rockneurotiko/django,jallohm/django,webgeodatavore/django,taaviteska/django,harisibrahimkv/django,rizumu/django,programadorjc/django,lmorchard/django,druuu/django,indevgr/django,adambrenecki/django,ojake/django,lwiecek/django,joequery/django,willhardy/django,simone/django-gb,schinckel/django,weiawe/django,memtoko/django,TimYi/django,frePPLe/django,Adnn/django,jeezybrick/django,kisna72/django,gohin/django,ABaldwinHunter/django-clone-classic,sam-tsai/django,hnakamur/django,hobarrera/django,tayfun/django,karyon/django,yask123/django,marctc/django,zanderle/django,t0in4/django,ziima/django,supriyantomaftuh/django,blindroot/django,NullSoldier/django,petecummings/django,intgr/django,dfunckt/django,Balachan27/django,h4r5h1t/django-hauthy,elena/django,HousekeepLtd/django,synasius/django,divio/django,dsanders11/django,duqiao/django,GaussDing/django,yceruto/django,ABaldwinHunter/django-clone,eugena/django,koniiiik/django,dursk/django,rmboggs/django,varunnaganathan/django,rwillmer/django,krishna-pandey-git/django,filias/django,coldmind/django,abomyi/django,EliotBerriot/django,atul-bhouraskar/django,vincepandolfo/django,dex4er/django,harisibrah
imkv/django,eyohansa/django,Nepherhotep/django,avneesh91/django,dwightgunning/django,archen/django,indevgr/django,ryanahall/django,follow99/django,AlexHill/django,pipermerriam/django,mmardini/django,rwillmer/django,ASCrookes/django,apollo13/django,sbellem/django,1013553207/django,syaiful6/django,devops2014/djangosite,crazy-canux/django,synasius/django,kosz85/django,mrbox/django,zulip/django,ecederstrand/django,WSDC-NITWarangal/django,jsoref/django,GhostThrone/django,wetneb/django,kosz85/django,jrrembert/django,jrrembert/django,postrational/django,andyzsf/django,memtoko/django,aerophile/django,tayfun/django,jenalgit/django,rmboggs/django,xwolf12/django,DONIKAN/django,lunafeng/django,oberlin/django,follow99/django,simonw/django,litchfield/django,waytai/django,treyhunner/django,oscaro/django,tuhangdi/django,druuu/django,Beeblio/django,piquadrat/django,krisys/django,HousekeepLtd/django,TimYi/django,errx/django,darjeeling/django,sopier/django,delinhabit/django,edevil/django,gengue/django,elkingtonmcb/django,z0by/django,follow99/django,bspink/django,mathspace/django,mrbox/django,sbellem/django,payeldillip/django,pasqualguerrero/django,elijah513/django,DasIch/django,1013553207/django,kutenai/django,dfunckt/django,tcwicklund/django,jallohm/django,druuu/django,EliotBerriot/django,BMJHayward/django,aidanlister/django,kutenai/django,ghedsouza/django,rogerhu/django,django-nonrel/django,davgibbs/django,Y3K/django,coldmind/django,olasitarska/django,dgladkov/django,daniponi/django,oinopion/django,cainmatt/django,yakky/django,scorphus/django,HonzaKral/django,olasitarska/django,sgzsh269/django,jmcarp/django,kosz85/django,feroda/django,blighj/django,Yong-Lee/django,jvkops/django,jgeskens/django,pauloxnet/django,jrrembert/django,dydek/django,b-me/django,GhostThrone/django,rwillmer/django,baylee/django,BrotherPhil/django,twz915/django,dfdx2/django,rlugojr/django,unaizalakain/django,jhg/django,mbox/django,aroche/django,tcwicklund/django,wkschwartz/django,follow99/django,litchfield/djang
o,jhoos/django,nju520/django,theo-l/django,joakim-hove/django,rsalmaso/django,marqueedev/django,labcodes/django,jscn/django,intgr/django,b-me/django,syphar/django,jdelight/django,carljm/django,intgr/django,Y3K/django,aspidites/django,AndrewGrossman/django,shaib/django,donkirkby/django,Balachan27/django,sdcooke/django,YangSongzhou/django,simonw/django,liu602348184/django,dsanders11/django,elkingtonmcb/django,zulip/django,alrifqi/django,monetate/django,etos/django,akintoey/django,mathspace/django,yask123/django,katrid/django,rsvip/Django,ericfc/django,JorgeCoock/django,litchfield/django,lwiecek/django,wweiradio/django,barbuza/django,delinhabit/django,ifduyue/django,shownomercy/django,gohin/django,auready/django,django/django,pquentin/django,double-y/django,zedr/django,deployed/django,iambibhas/django,stevenewey/django,h4r5h1t/django-hauthy,zerc/django,MounirMesselmeni/django,Beeblio/django,yakky/django,webgeodatavore/django,mrfuxi/django,ytjiang/django,gdi2290/django,mitya57/django,darjeeling/django,scorphus/django,RossBrunton/django,jn7163/django,ajoaoff/django,x111ong/django,ABaldwinHunter/django-clone,aisipos/django,zsiciarz/django,quxiaolong1504/django,sjlehtin/django,vmarkovtsev/django,shacker/django,JavML/django,jaywreddy/django,mdj2/django,xadahiya/django,Endika/django,georgemarshall/django,makinacorpus/django,apocquet/django,dracos/django,yograterol/django,codepantry/django,ivandevp/django,andresgz/django,quamilek/django,alilotfi/django,Leila20/django,haxoza/django,jylaxp/django,uranusjr/django,curtisstpierre/django,kaedroho/django,jarshwah/django,davgibbs/django,hnakamur/django,PetrDlouhy/django,koordinates/django,rapilabs/django,wweiradio/django,benjaminjkraft/django,ryangallen/django,EmadMokhtar/Django,avanov/django,ABaldwinHunter/django-clone,oscaro/django,curtisstpierre/django,rrrene/django,mlavin/django,freakboy3742/django,moreati/django,kcpawan/django,nemesisdesign/django,intgr/django,kaedroho/django,JorgeCoock/django,BrotherPhil/django,django-nonrel/dj
ango,kcpawan/django,kosz85/django,Argon-Zhou/django,beck/django,Endika/django,areski/django,gannetson/django,willharris/django,hunter007/django,alimony/django,yograterol/django,simone/django-gb,risicle/django,ojengwa/django-1,carljm/django,YangSongzhou/django,rmboggs/django,pasqualguerrero/django,timgraham/django,liavkoren/djangoDev
|
from django.views.generic.edit import FormMixin
from django.contrib import messages
class SuccessMessageMixin(FormMixin):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
Remove unnecessary and problematic parent class from SuccessMessageMixin
refs #16319, thanks to bmispelon for the catch
|
from django.contrib import messages
class SuccessMessageMixin(object):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
|
<commit_before>from django.views.generic.edit import FormMixin
from django.contrib import messages
class SuccessMessageMixin(FormMixin):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
<commit_msg>Remove unnecessary and problematic parent class from SuccessMessageMixin
refs #16319, thanks to bmispelon for the catch<commit_after>
|
from django.contrib import messages
class SuccessMessageMixin(object):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
|
from django.views.generic.edit import FormMixin
from django.contrib import messages
class SuccessMessageMixin(FormMixin):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
Remove unnecessary and problematic parent class from SuccessMessageMixin
refs #16319, thanks to bmispelon for the catchfrom django.contrib import messages
class SuccessMessageMixin(object):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
|
<commit_before>from django.views.generic.edit import FormMixin
from django.contrib import messages
class SuccessMessageMixin(FormMixin):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
<commit_msg>Remove unnecessary and problematic parent class from SuccessMessageMixin
refs #16319, thanks to bmispelon for the catch<commit_after>from django.contrib import messages
class SuccessMessageMixin(object):
"""
Adds a success message on successful form submission.
"""
success_message = ''
def form_valid(self, form):
response = super(SuccessMessageMixin, self).form_valid(form)
success_message = self.get_success_message(form.cleaned_data)
if success_message:
messages.success(self.request, success_message)
return response
def get_success_message(self, cleaned_data):
return self.success_message % cleaned_data
|
efb98ffae0a92d9a0facc76cd43bb51dca3b2820
|
nibble_aes/find_dist/find_ids.py
|
nibble_aes/find_dist/find_ids.py
|
"""
Derive a list of impossible differentials.
"""
import ast
import sys
def parse(line):
return ast.literal_eval(line)
def main():
if len(sys.argv) != 3:
print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr)
sys.exit(1)
forward_diffs = []
with open(sys.argv[1]) as f:
for i, forward_rounds, xss in map(parse, f):
forward_diffs.append((i, forward_rounds, [set(xs) for xs in xss])
backward_diffs = []
with open(sys.argv[2]) as g:
for i, backward_rounds, yss in map(parse, g):
backward_diffs.append((i, backward_rounds, [set(ys) for ys in yss])
# truncate first round of backward differential
# by comparing last round of forward differential and second last
# round of backward differential
ids = []
for i, forward_rounds, xss in forward_diffs:
for j, backward_rounds, yss in backward_diffs:
if xss[-1].isdisjoint(yss[-2]):
backward_rounds -= 1
print((i, forward_rounds, backward_rounds, j))
if __name__ == "__main__":
main()
|
"""
Derive a list of impossible differentials.
"""
import ast
import sys
def parse(line):
i, rounds, xss = ast.literal_eval(line)
yss = [set(xs) for xs in xss]
return (i, rounds, yss)
def main():
if len(sys.argv) != 3:
print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr)
sys.exit(1)
ids = []
with open(sys.argv[1]) as f:
for i, forward_rounds, xss in map(parse, f):
if forward_rounds < 2:
continue
with open(sys.argv[2]) as g:
for j, backward_rounds, yss in map(parse, g):
if backward_rounds < 2:
continue
# truncate first round of backward differential
# by comparing last round of forward differential and second last
# round of backward differential
if xss[-1].isdisjoint(yss[-2]):
backward_rounds -= 1
print((i, forward_rounds, backward_rounds, j))
if __name__ == "__main__":
main()
|
Revert "Trade memory for time."
|
Revert "Trade memory for time."
This reverts commit f4c13756eef0dc6b7231e37d5f5d9029dea1fb62.
|
Python
|
mit
|
wei2912/aes-idc,wei2912/idc,wei2912/idc,wei2912/idc,wei2912/idc,wei2912/aes-idc
|
"""
Derive a list of impossible differentials.
"""
import ast
import sys
def parse(line):
return ast.literal_eval(line)
def main():
if len(sys.argv) != 3:
print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr)
sys.exit(1)
forward_diffs = []
with open(sys.argv[1]) as f:
for i, forward_rounds, xss in map(parse, f):
forward_diffs.append((i, forward_rounds, [set(xs) for xs in xss])
backward_diffs = []
with open(sys.argv[2]) as g:
for i, backward_rounds, yss in map(parse, g):
backward_diffs.append((i, backward_rounds, [set(ys) for ys in yss])
# truncate first round of backward differential
# by comparing last round of forward differential and second last
# round of backward differential
ids = []
for i, forward_rounds, xss in forward_diffs:
for j, backward_rounds, yss in backward_diffs:
if xss[-1].isdisjoint(yss[-2]):
backward_rounds -= 1
print((i, forward_rounds, backward_rounds, j))
if __name__ == "__main__":
main()
Revert "Trade memory for time."
This reverts commit f4c13756eef0dc6b7231e37d5f5d9029dea1fb62.
|
"""
Derive a list of impossible differentials.
"""
import ast
import sys
def parse(line):
i, rounds, xss = ast.literal_eval(line)
yss = [set(xs) for xs in xss]
return (i, rounds, yss)
def main():
if len(sys.argv) != 3:
print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr)
sys.exit(1)
ids = []
with open(sys.argv[1]) as f:
for i, forward_rounds, xss in map(parse, f):
if forward_rounds < 2:
continue
with open(sys.argv[2]) as g:
for j, backward_rounds, yss in map(parse, g):
if backward_rounds < 2:
continue
# truncate first round of backward differential
# by comparing last round of forward differential and second last
# round of backward differential
if xss[-1].isdisjoint(yss[-2]):
backward_rounds -= 1
print((i, forward_rounds, backward_rounds, j))
if __name__ == "__main__":
main()
|
<commit_before>"""
Derive a list of impossible differentials.
"""
import ast
import sys
def parse(line):
return ast.literal_eval(line)
def main():
if len(sys.argv) != 3:
print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr)
sys.exit(1)
forward_diffs = []
with open(sys.argv[1]) as f:
for i, forward_rounds, xss in map(parse, f):
forward_diffs.append((i, forward_rounds, [set(xs) for xs in xss])
backward_diffs = []
with open(sys.argv[2]) as g:
for i, backward_rounds, yss in map(parse, g):
backward_diffs.append((i, backward_rounds, [set(ys) for ys in yss])
# truncate first round of backward differential
# by comparing last round of forward differential and second last
# round of backward differential
ids = []
for i, forward_rounds, xss in forward_diffs:
for j, backward_rounds, yss in backward_diffs:
if xss[-1].isdisjoint(yss[-2]):
backward_rounds -= 1
print((i, forward_rounds, backward_rounds, j))
if __name__ == "__main__":
main()
<commit_msg>Revert "Trade memory for time."
This reverts commit f4c13756eef0dc6b7231e37d5f5d9029dea1fb62.<commit_after>
|
"""
Derive a list of impossible differentials.
"""
import ast
import sys
def parse(line):
i, rounds, xss = ast.literal_eval(line)
yss = [set(xs) for xs in xss]
return (i, rounds, yss)
def main():
if len(sys.argv) != 3:
print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr)
sys.exit(1)
ids = []
with open(sys.argv[1]) as f:
for i, forward_rounds, xss in map(parse, f):
if forward_rounds < 2:
continue
with open(sys.argv[2]) as g:
for j, backward_rounds, yss in map(parse, g):
if backward_rounds < 2:
continue
# truncate first round of backward differential
# by comparing last round of forward differential and second last
# round of backward differential
if xss[-1].isdisjoint(yss[-2]):
backward_rounds -= 1
print((i, forward_rounds, backward_rounds, j))
if __name__ == "__main__":
main()
|
"""
Derive a list of impossible differentials.
"""
import ast
import sys
def parse(line):
return ast.literal_eval(line)
def main():
if len(sys.argv) != 3:
print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr)
sys.exit(1)
forward_diffs = []
with open(sys.argv[1]) as f:
for i, forward_rounds, xss in map(parse, f):
forward_diffs.append((i, forward_rounds, [set(xs) for xs in xss])
backward_diffs = []
with open(sys.argv[2]) as g:
for i, backward_rounds, yss in map(parse, g):
backward_diffs.append((i, backward_rounds, [set(ys) for ys in yss])
# truncate first round of backward differential
# by comparing last round of forward differential and second last
# round of backward differential
ids = []
for i, forward_rounds, xss in forward_diffs:
for j, backward_rounds, yss in backward_diffs:
if xss[-1].isdisjoint(yss[-2]):
backward_rounds -= 1
print((i, forward_rounds, backward_rounds, j))
if __name__ == "__main__":
main()
Revert "Trade memory for time."
This reverts commit f4c13756eef0dc6b7231e37d5f5d9029dea1fb62."""
Derive a list of impossible differentials.
"""
import ast
import sys
def parse(line):
i, rounds, xss = ast.literal_eval(line)
yss = [set(xs) for xs in xss]
return (i, rounds, yss)
def main():
if len(sys.argv) != 3:
print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr)
sys.exit(1)
ids = []
with open(sys.argv[1]) as f:
for i, forward_rounds, xss in map(parse, f):
if forward_rounds < 2:
continue
with open(sys.argv[2]) as g:
for j, backward_rounds, yss in map(parse, g):
if backward_rounds < 2:
continue
# truncate first round of backward differential
# by comparing last round of forward differential and second last
# round of backward differential
if xss[-1].isdisjoint(yss[-2]):
backward_rounds -= 1
print((i, forward_rounds, backward_rounds, j))
if __name__ == "__main__":
main()
|
<commit_before>"""
Derive a list of impossible differentials.
"""
import ast
import sys
def parse(line):
return ast.literal_eval(line)
def main():
if len(sys.argv) != 3:
print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr)
sys.exit(1)
forward_diffs = []
with open(sys.argv[1]) as f:
for i, forward_rounds, xss in map(parse, f):
forward_diffs.append((i, forward_rounds, [set(xs) for xs in xss])
backward_diffs = []
with open(sys.argv[2]) as g:
for i, backward_rounds, yss in map(parse, g):
backward_diffs.append((i, backward_rounds, [set(ys) for ys in yss])
# truncate first round of backward differential
# by comparing last round of forward differential and second last
# round of backward differential
ids = []
for i, forward_rounds, xss in forward_diffs:
for j, backward_rounds, yss in backward_diffs:
if xss[-1].isdisjoint(yss[-2]):
backward_rounds -= 1
print((i, forward_rounds, backward_rounds, j))
if __name__ == "__main__":
main()
<commit_msg>Revert "Trade memory for time."
This reverts commit f4c13756eef0dc6b7231e37d5f5d9029dea1fb62.<commit_after>"""
Derive a list of impossible differentials.
"""
import ast
import sys
def parse(line):
i, rounds, xss = ast.literal_eval(line)
yss = [set(xs) for xs in xss]
return (i, rounds, yss)
def main():
if len(sys.argv) != 3:
print("usage: ./find_ids.py [forward differentials file] [backward differentials file]", file=sys.stderr)
sys.exit(1)
ids = []
with open(sys.argv[1]) as f:
for i, forward_rounds, xss in map(parse, f):
if forward_rounds < 2:
continue
with open(sys.argv[2]) as g:
for j, backward_rounds, yss in map(parse, g):
if backward_rounds < 2:
continue
# truncate first round of backward differential
# by comparing last round of forward differential and second last
# round of backward differential
if xss[-1].isdisjoint(yss[-2]):
backward_rounds -= 1
print((i, forward_rounds, backward_rounds, j))
if __name__ == "__main__":
main()
|
6cffdd1df7a3bc1d5abded0610a8d4a7f7cb8159
|
en-2014-03-23-restarting-a-python-script-within-itself/daemon.py
|
en-2014-03-23-restarting-a-python-script-within-itself/daemon.py
|
#!/usr/bin/env python
import os
import sys
from os.path import getmtime
# Parse script arguments and configuration files.
# ...
WATCHED_FILES = [__file__]
WATCHED_FILES_MTIMES = [(f, getmtime(f)) for f in WATCHED_FILES]
while True:
# Wait for inputs and act on them.
# ...
# Check whether a watched file has changed.
for f, mtime in WATCHED_FILES_MTIMES:
if getmtime(f) > mtime:
# One of the files has changed, so restart the script.
print('--> restarting')
os.execv(__file__, sys.argv)
|
#!/usr/bin/env python
import os
import sys
from os.path import getmtime
# Parse script arguments and configuration files.
# ...
WATCHED_FILES = [__file__]
WATCHED_FILES_MTIMES = [(f, getmtime(f)) for f in WATCHED_FILES]
while True:
# Wait for inputs and act on them.
# ...
# Check whether a watched file has changed.
for f, mtime in WATCHED_FILES_MTIMES:
if getmtime(f) != mtime:
# One of the files has changed, so restart the script.
print('--> restarting')
os.execv(__file__, sys.argv)
|
Use '!=' instead of '>' when checking for mtime changes.
|
blog/en-2014-03-23: Use '!=' instead of '>' when checking for mtime changes.
The use of '!=' ensures that the reloading works even when the new mtime is
lower than the current mtime (e.g. when we use a backed-up version of a file).
Thanks to Ronald for pointing this out
(http://blog.petrzemek.net/2014/03/23/restarting-a-python-script-within-itself/#comment-668).
|
Python
|
bsd-3-clause
|
s3rvac/blog,s3rvac/blog,s3rvac/blog,s3rvac/blog
|
#!/usr/bin/env python
import os
import sys
from os.path import getmtime
# Parse script arguments and configuration files.
# ...
WATCHED_FILES = [__file__]
WATCHED_FILES_MTIMES = [(f, getmtime(f)) for f in WATCHED_FILES]
while True:
# Wait for inputs and act on them.
# ...
# Check whether a watched file has changed.
for f, mtime in WATCHED_FILES_MTIMES:
if getmtime(f) > mtime:
# One of the files has changed, so restart the script.
print('--> restarting')
os.execv(__file__, sys.argv)
blog/en-2014-03-23: Use '!=' instead of '>' when checking for mtime changes.
The use of '!=' ensures that the reloading works even when the new mtime is
lower than the current mtime (e.g. when we use a backed-up version of a file).
Thanks to Ronald for pointing this out
(http://blog.petrzemek.net/2014/03/23/restarting-a-python-script-within-itself/#comment-668).
|
#!/usr/bin/env python
import os
import sys
from os.path import getmtime
# Parse script arguments and configuration files.
# ...
WATCHED_FILES = [__file__]
WATCHED_FILES_MTIMES = [(f, getmtime(f)) for f in WATCHED_FILES]
while True:
# Wait for inputs and act on them.
# ...
# Check whether a watched file has changed.
for f, mtime in WATCHED_FILES_MTIMES:
if getmtime(f) != mtime:
# One of the files has changed, so restart the script.
print('--> restarting')
os.execv(__file__, sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from os.path import getmtime
# Parse script arguments and configuration files.
# ...
WATCHED_FILES = [__file__]
WATCHED_FILES_MTIMES = [(f, getmtime(f)) for f in WATCHED_FILES]
while True:
# Wait for inputs and act on them.
# ...
# Check whether a watched file has changed.
for f, mtime in WATCHED_FILES_MTIMES:
if getmtime(f) > mtime:
# One of the files has changed, so restart the script.
print('--> restarting')
os.execv(__file__, sys.argv)
<commit_msg>blog/en-2014-03-23: Use '!=' instead of '>' when checking for mtime changes.
The use of '!=' ensures that the reloading works even when the new mtime is
lower than the current mtime (e.g. when we use a backed-up version of a file).
Thanks to Ronald for pointing this out
(http://blog.petrzemek.net/2014/03/23/restarting-a-python-script-within-itself/#comment-668).<commit_after>
|
#!/usr/bin/env python
import os
import sys
from os.path import getmtime
# Parse script arguments and configuration files.
# ...
WATCHED_FILES = [__file__]
WATCHED_FILES_MTIMES = [(f, getmtime(f)) for f in WATCHED_FILES]
while True:
# Wait for inputs and act on them.
# ...
# Check whether a watched file has changed.
for f, mtime in WATCHED_FILES_MTIMES:
if getmtime(f) != mtime:
# One of the files has changed, so restart the script.
print('--> restarting')
os.execv(__file__, sys.argv)
|
#!/usr/bin/env python
import os
import sys
from os.path import getmtime
# Parse script arguments and configuration files.
# ...
WATCHED_FILES = [__file__]
WATCHED_FILES_MTIMES = [(f, getmtime(f)) for f in WATCHED_FILES]
while True:
# Wait for inputs and act on them.
# ...
# Check whether a watched file has changed.
for f, mtime in WATCHED_FILES_MTIMES:
if getmtime(f) > mtime:
# One of the files has changed, so restart the script.
print('--> restarting')
os.execv(__file__, sys.argv)
blog/en-2014-03-23: Use '!=' instead of '>' when checking for mtime changes.
The use of '!=' ensures that the reloading works even when the new mtime is
lower than the current mtime (e.g. when we use a backed-up version of a file).
Thanks to Ronald for pointing this out
(http://blog.petrzemek.net/2014/03/23/restarting-a-python-script-within-itself/#comment-668).#!/usr/bin/env python
import os
import sys
from os.path import getmtime
# Parse script arguments and configuration files.
# ...
WATCHED_FILES = [__file__]
WATCHED_FILES_MTIMES = [(f, getmtime(f)) for f in WATCHED_FILES]
while True:
# Wait for inputs and act on them.
# ...
# Check whether a watched file has changed.
for f, mtime in WATCHED_FILES_MTIMES:
if getmtime(f) != mtime:
# One of the files has changed, so restart the script.
print('--> restarting')
os.execv(__file__, sys.argv)
|
<commit_before>#!/usr/bin/env python
import os
import sys
from os.path import getmtime
# Parse script arguments and configuration files.
# ...
WATCHED_FILES = [__file__]
WATCHED_FILES_MTIMES = [(f, getmtime(f)) for f in WATCHED_FILES]
while True:
# Wait for inputs and act on them.
# ...
# Check whether a watched file has changed.
for f, mtime in WATCHED_FILES_MTIMES:
if getmtime(f) > mtime:
# One of the files has changed, so restart the script.
print('--> restarting')
os.execv(__file__, sys.argv)
<commit_msg>blog/en-2014-03-23: Use '!=' instead of '>' when checking for mtime changes.
The use of '!=' ensures that the reloading works even when the new mtime is
lower than the current mtime (e.g. when we use a backed-up version of a file).
Thanks to Ronald for pointing this out
(http://blog.petrzemek.net/2014/03/23/restarting-a-python-script-within-itself/#comment-668).<commit_after>#!/usr/bin/env python
import os
import sys
from os.path import getmtime
# Parse script arguments and configuration files.
# ...
WATCHED_FILES = [__file__]
WATCHED_FILES_MTIMES = [(f, getmtime(f)) for f in WATCHED_FILES]
while True:
# Wait for inputs and act on them.
# ...
# Check whether a watched file has changed.
for f, mtime in WATCHED_FILES_MTIMES:
if getmtime(f) != mtime:
# One of the files has changed, so restart the script.
print('--> restarting')
os.execv(__file__, sys.argv)
|
10bfa701f352e0f916b1edd9913bee788f09568f
|
oscar/apps/catalogue/managers.py
|
oscar/apps/catalogue/managers.py
|
from django.db import models
class ProductManager(models.Manager):
def base_queryset(self):
"""
Return ``QuerySet`` with related content pre-loaded.
"""
return self.get_query_set().select_related('product_class')\
.prefetch_related('variants',
'product_options',
'product_class__options',
'stockrecords',
'images',
).all()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
"""
def get_query_set(self):
return super(BrowsableProductManager, self).get_query_set().filter(
parent=None)
|
from django.db import models
class ProductQuerySet(models.query.QuerySet):
def base_queryset(self):
"""
Applies select_related and prefetch_related for commonly related
models to save on queries
"""
return self.select_related('product_class')\
.prefetch_related('variants',
'product_options',
'product_class__options',
'stockrecords',
'images',
)
def browsable(self):
"""
Excludes non-canonical products.
"""
return self.filter(parent=None)
class ProductManager(models.Manager):
"""
Uses ProductQuerySet and proxies its methods to allow chaining
Once Django 1.7 lands, this class can probably be removed:
https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager #n oqa
"""
def get_queryset(self):
return ProductQuerySet(self.model, using=self._db)
def browsable(self):
return self.get_queryset().browsable()
def base_queryset(self):
return self.get_queryset().base_queryset()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
Could be deprecated after Oscar 0.7 is released
"""
def get_queryset(self):
return super(BrowsableProductManager, self).get_queryset().browsable()
|
Allow chaining of Product's custom querysets
|
Allow chaining of Product's custom querysets
This aligns the implementation of Oscar specific QuerySet Methods with
the implementation in current django core[1].
While this is not DRY, it does deliver on chainability and can be seen
as preparation to take advantage of the improvements coming to this part
of django in 1.7 [2]
The original motivation for this was that using a custom QuerySet while
trying not to copy code from oscar felt too hard.
[1] https://github.com/django/django/blob/1.6.2/django/db/models/manager.py#L123
[2] https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager
Fixes #1278.
Deprecation comments added by @maikhoepfel
|
Python
|
bsd-3-clause
|
thechampanurag/django-oscar,mexeniz/django-oscar,jinnykoo/wuyisj,monikasulik/django-oscar,dongguangming/django-oscar,QLGu/django-oscar,WadeYuChen/django-oscar,josesanch/django-oscar,vovanbo/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,itbabu/django-oscar,WadeYuChen/django-oscar,anentropic/django-oscar,dongguangming/django-oscar,QLGu/django-oscar,WadeYuChen/django-oscar,josesanch/django-oscar,eddiep1101/django-oscar,marcoantoniooliveira/labweb,michaelkuty/django-oscar,Bogh/django-oscar,jlmadurga/django-oscar,john-parton/django-oscar,MatthewWilkes/django-oscar,amirrpp/django-oscar,pasqualguerrero/django-oscar,taedori81/django-oscar,michaelkuty/django-oscar,vovanbo/django-oscar,taedori81/django-oscar,QLGu/django-oscar,monikasulik/django-oscar,django-oscar/django-oscar,saadatqadri/django-oscar,Jannes123/django-oscar,amirrpp/django-oscar,manevant/django-oscar,manevant/django-oscar,taedori81/django-oscar,ka7eh/django-oscar,WillisXChen/django-oscar,jmt4/django-oscar,mexeniz/django-oscar,eddiep1101/django-oscar,ademuk/django-oscar,rocopartners/django-oscar,nickpack/django-oscar,anentropic/django-oscar,MatthewWilkes/django-oscar,bschuon/django-oscar,DrOctogon/unwash_ecom,jlmadurga/django-oscar,bschuon/django-oscar,QLGu/django-oscar,pasqualguerrero/django-oscar,pdonadeo/django-oscar,marcoantoniooliveira/labweb,spartonia/django-oscar,lijoantony/django-oscar,saadatqadri/django-oscar,manevant/django-oscar,jinnykoo/wuyisj,thechampanurag/django-oscar,john-parton/django-oscar,jinnykoo/wuyisj.com,adamend/django-oscar,pasqualguerrero/django-oscar,Jannes123/django-oscar,lijoantony/django-oscar,adamend/django-oscar,jmt4/django-oscar,sasha0/django-oscar,jlmadurga/django-oscar,okfish/django-oscar,josesanch/django-oscar,kapari/django-oscar,DrOctogon/unwash_ecom,saadatqadri/django-oscar,sasha0/django-oscar,itbabu/django-oscar,mexeniz/django-oscar,anentropic/django-oscar,MatthewWilkes/django-oscar,bnprk/django-oscar,marcoantoniooliveira/labweb,kapari/django-oscar,binaryd
ud/django-oscar,machtfit/django-oscar,nickpack/django-oscar,adamend/django-oscar,nickpack/django-oscar,ahmetdaglarbas/e-commerce,kapari/django-oscar,jinnykoo/wuyisj.com,binarydud/django-oscar,spartonia/django-oscar,vovanbo/django-oscar,machtfit/django-oscar,kapt/django-oscar,monikasulik/django-oscar,itbabu/django-oscar,ka7eh/django-oscar,john-parton/django-oscar,vovanbo/django-oscar,manevant/django-oscar,Bogh/django-oscar,jlmadurga/django-oscar,eddiep1101/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,amirrpp/django-oscar,thechampanurag/django-oscar,lijoantony/django-oscar,bnprk/django-oscar,jinnykoo/wuyisj.com,sonofatailor/django-oscar,amirrpp/django-oscar,nickpack/django-oscar,michaelkuty/django-oscar,michaelkuty/django-oscar,jinnykoo/wuyisj.com,pdonadeo/django-oscar,anentropic/django-oscar,Bogh/django-oscar,faratro/django-oscar,nfletton/django-oscar,jmt4/django-oscar,ademuk/django-oscar,jinnykoo/wuyisj,sonofatailor/django-oscar,rocopartners/django-oscar,faratro/django-oscar,jinnykoo/wuyisj,pdonadeo/django-oscar,itbabu/django-oscar,machtfit/django-oscar,MatthewWilkes/django-oscar,john-parton/django-oscar,ademuk/django-oscar,mexeniz/django-oscar,jinnykoo/christmas,solarissmoke/django-oscar,binarydud/django-oscar,taedori81/django-oscar,dongguangming/django-oscar,WillisXChen/django-oscar,jmt4/django-oscar,solarissmoke/django-oscar,Jannes123/django-oscar,bschuon/django-oscar,solarissmoke/django-oscar,nfletton/django-oscar,jinnykoo/christmas,thechampanurag/django-oscar,saadatqadri/django-oscar,kapt/django-oscar,sonofatailor/django-oscar,okfish/django-oscar,Bogh/django-oscar,spartonia/django-oscar,monikasulik/django-oscar,ka7eh/django-oscar,nfletton/django-oscar,sasha0/django-oscar,dongguangming/django-oscar,sasha0/django-oscar,spartonia/django-oscar,faratro/django-oscar,pasqualguerrero/django-oscar,okfish/django-oscar,DrOctogon/unwash_ecom,django-oscar/django-oscar,ahmetdaglarbas/e-commerce,jinnykoo/christma
s,faratro/django-oscar,ka7eh/django-oscar,ahmetdaglarbas/e-commerce,Jannes123/django-oscar,adamend/django-oscar,bnprk/django-oscar,kapari/django-oscar,rocopartners/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,WillisXChen/django-oscar,kapt/django-oscar,django-oscar/django-oscar,bnprk/django-oscar,rocopartners/django-oscar,WillisXChen/django-oscar,bschuon/django-oscar,marcoantoniooliveira/labweb,binarydud/django-oscar,eddiep1101/django-oscar,ademuk/django-oscar,okfish/django-oscar,pdonadeo/django-oscar,WadeYuChen/django-oscar
|
from django.db import models
class ProductManager(models.Manager):
def base_queryset(self):
"""
Return ``QuerySet`` with related content pre-loaded.
"""
return self.get_query_set().select_related('product_class')\
.prefetch_related('variants',
'product_options',
'product_class__options',
'stockrecords',
'images',
).all()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
"""
def get_query_set(self):
return super(BrowsableProductManager, self).get_query_set().filter(
parent=None)
Allow chaining of Product's custom querysets
This aligns the implementation of Oscar specific QuerySet Methods with
the implementation in current django core[1].
While this is not DRY, it does deliver on chainability and can be seen
as preparation to take advantage of the improvements coming to this part
of django in 1.7 [2]
The original motivation for this was that using a custom QuerySet while
trying not to copy code from oscar felt too hard.
[1] https://github.com/django/django/blob/1.6.2/django/db/models/manager.py#L123
[2] https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager
Fixes #1278.
Deprecation comments added by @maikhoepfel
|
from django.db import models
class ProductQuerySet(models.query.QuerySet):
def base_queryset(self):
"""
Applies select_related and prefetch_related for commonly related
models to save on queries
"""
return self.select_related('product_class')\
.prefetch_related('variants',
'product_options',
'product_class__options',
'stockrecords',
'images',
)
def browsable(self):
"""
Excludes non-canonical products.
"""
return self.filter(parent=None)
class ProductManager(models.Manager):
"""
Uses ProductQuerySet and proxies its methods to allow chaining
Once Django 1.7 lands, this class can probably be removed:
https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager #n oqa
"""
def get_queryset(self):
return ProductQuerySet(self.model, using=self._db)
def browsable(self):
return self.get_queryset().browsable()
def base_queryset(self):
return self.get_queryset().base_queryset()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
Could be deprecated after Oscar 0.7 is released
"""
def get_queryset(self):
return super(BrowsableProductManager, self).get_queryset().browsable()
|
<commit_before>from django.db import models
class ProductManager(models.Manager):
def base_queryset(self):
"""
Return ``QuerySet`` with related content pre-loaded.
"""
return self.get_query_set().select_related('product_class')\
.prefetch_related('variants',
'product_options',
'product_class__options',
'stockrecords',
'images',
).all()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
"""
def get_query_set(self):
return super(BrowsableProductManager, self).get_query_set().filter(
parent=None)
<commit_msg>Allow chaining of Product's custom querysets
This aligns the implementation of Oscar specific QuerySet Methods with
the implementation in current django core[1].
While this is not DRY, it does deliver on chainability and can be seen
as preparation to take advantage of the improvements coming to this part
of django in 1.7 [2]
The original motivation for this was that using a custom QuerySet while
trying not to copy code from oscar felt too hard.
[1] https://github.com/django/django/blob/1.6.2/django/db/models/manager.py#L123
[2] https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager
Fixes #1278.
Deprecation comments added by @maikhoepfel<commit_after>
|
from django.db import models
class ProductQuerySet(models.query.QuerySet):
def base_queryset(self):
"""
Applies select_related and prefetch_related for commonly related
models to save on queries
"""
return self.select_related('product_class')\
.prefetch_related('variants',
'product_options',
'product_class__options',
'stockrecords',
'images',
)
def browsable(self):
"""
Excludes non-canonical products.
"""
return self.filter(parent=None)
class ProductManager(models.Manager):
"""
Uses ProductQuerySet and proxies its methods to allow chaining
Once Django 1.7 lands, this class can probably be removed:
https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager #n oqa
"""
def get_queryset(self):
return ProductQuerySet(self.model, using=self._db)
def browsable(self):
return self.get_queryset().browsable()
def base_queryset(self):
return self.get_queryset().base_queryset()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
Could be deprecated after Oscar 0.7 is released
"""
def get_queryset(self):
return super(BrowsableProductManager, self).get_queryset().browsable()
|
from django.db import models
class ProductManager(models.Manager):
def base_queryset(self):
"""
Return ``QuerySet`` with related content pre-loaded.
"""
return self.get_query_set().select_related('product_class')\
.prefetch_related('variants',
'product_options',
'product_class__options',
'stockrecords',
'images',
).all()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
"""
def get_query_set(self):
return super(BrowsableProductManager, self).get_query_set().filter(
parent=None)
Allow chaining of Product's custom querysets
This aligns the implementation of Oscar specific QuerySet Methods with
the implementation in current django core[1].
While this is not DRY, it does deliver on chainability and can be seen
as preparation to take advantage of the improvements coming to this part
of django in 1.7 [2]
The original motivation for this was that using a custom QuerySet while
trying not to copy code from oscar felt too hard.
[1] https://github.com/django/django/blob/1.6.2/django/db/models/manager.py#L123
[2] https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager
Fixes #1278.
Deprecation comments added by @maikhoepfelfrom django.db import models
class ProductQuerySet(models.query.QuerySet):
def base_queryset(self):
"""
Applies select_related and prefetch_related for commonly related
models to save on queries
"""
return self.select_related('product_class')\
.prefetch_related('variants',
'product_options',
'product_class__options',
'stockrecords',
'images',
)
def browsable(self):
"""
Excludes non-canonical products.
"""
return self.filter(parent=None)
class ProductManager(models.Manager):
"""
Uses ProductQuerySet and proxies its methods to allow chaining
Once Django 1.7 lands, this class can probably be removed:
https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager #n oqa
"""
def get_queryset(self):
return ProductQuerySet(self.model, using=self._db)
def browsable(self):
return self.get_queryset().browsable()
def base_queryset(self):
return self.get_queryset().base_queryset()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
Could be deprecated after Oscar 0.7 is released
"""
def get_queryset(self):
return super(BrowsableProductManager, self).get_queryset().browsable()
|
<commit_before>from django.db import models
class ProductManager(models.Manager):
def base_queryset(self):
"""
Return ``QuerySet`` with related content pre-loaded.
"""
return self.get_query_set().select_related('product_class')\
.prefetch_related('variants',
'product_options',
'product_class__options',
'stockrecords',
'images',
).all()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
"""
def get_query_set(self):
return super(BrowsableProductManager, self).get_query_set().filter(
parent=None)
<commit_msg>Allow chaining of Product's custom querysets
This aligns the implementation of Oscar specific QuerySet Methods with
the implementation in current django core[1].
While this is not DRY, it does deliver on chainability and can be seen
as preparation to take advantage of the improvements coming to this part
of django in 1.7 [2]
The original motivation for this was that using a custom QuerySet while
trying not to copy code from oscar felt too hard.
[1] https://github.com/django/django/blob/1.6.2/django/db/models/manager.py#L123
[2] https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager
Fixes #1278.
Deprecation comments added by @maikhoepfel<commit_after>from django.db import models
class ProductQuerySet(models.query.QuerySet):
def base_queryset(self):
"""
Applies select_related and prefetch_related for commonly related
models to save on queries
"""
return self.select_related('product_class')\
.prefetch_related('variants',
'product_options',
'product_class__options',
'stockrecords',
'images',
)
def browsable(self):
"""
Excludes non-canonical products.
"""
return self.filter(parent=None)
class ProductManager(models.Manager):
"""
Uses ProductQuerySet and proxies its methods to allow chaining
Once Django 1.7 lands, this class can probably be removed:
https://docs.djangoproject.com/en/dev/releases/1.7/#calling-custom-queryset-methods-from-the-manager #n oqa
"""
def get_queryset(self):
return ProductQuerySet(self.model, using=self._db)
def browsable(self):
return self.get_queryset().browsable()
def base_queryset(self):
return self.get_queryset().base_queryset()
class BrowsableProductManager(ProductManager):
"""
Excludes non-canonical products
Could be deprecated after Oscar 0.7 is released
"""
def get_queryset(self):
return super(BrowsableProductManager, self).get_queryset().browsable()
|
03db6c12584652230fe0cd1f982f2a70a7c1630b
|
test/test_ticket.py
|
test/test_ticket.py
|
import unittest
from mock import Mock
import sys
import os
from pytrac import Ticket
class TestTicket(unittest.TestCase):
def setUp(self):
server = Mock()
self.ticket = Ticket(server)
def testSearchWithAllParams(self):
self.ticket.search(summary='test_summary', owner='someowner', status='new')
self.ticket.api.query.assert_called_with('max=0&summary~=test_summary&owner=someowner&status=new')
if __name__ == '__main__':
unittest.main()
|
import unittest
from mock import Mock
import sys
import os
import datetime
from pytrac import Ticket
class TestTicket(unittest.TestCase):
def setUp(self):
server = Mock()
self.ticket = Ticket(server)
def testSearchWithAllParams(self):
self.ticket.search(summary='test_summary', owner='someowner', status='new')
self.ticket.api.query.assert_called_with('max=0&summary~=test_summary&owner=someowner&status=new')
class TestUpdateTicket(unittest.TestCase):
ticket_id = 1
def setUp(self):
server = Mock()
timestamp = datetime.datetime.now()
server.ticket.get.return_value = [self.ticket_id,
timestamp,
timestamp,
{'_ts': timestamp,
'action': 'leave'}]
server.ticket.update.return_value = [self.ticket_id,
timestamp,
timestamp,
{'_ts': timestamp,
'action': 'leave'}]
self.ticket = Ticket(server)
def testComment(self):
self.ticket.comment(self.ticket_id, "some comment")
self.ticket.api.get.assert_called_with(1)
if __name__ == '__main__':
unittest.main()
|
Add test for comment api
|
Add test for comment api
|
Python
|
apache-2.0
|
Jimdo/pytrac,Jimdo/pytrac
|
import unittest
from mock import Mock
import sys
import os
from pytrac import Ticket
class TestTicket(unittest.TestCase):
def setUp(self):
server = Mock()
self.ticket = Ticket(server)
def testSearchWithAllParams(self):
self.ticket.search(summary='test_summary', owner='someowner', status='new')
self.ticket.api.query.assert_called_with('max=0&summary~=test_summary&owner=someowner&status=new')
if __name__ == '__main__':
unittest.main()
Add test for comment api
|
import unittest
from mock import Mock
import sys
import os
import datetime
from pytrac import Ticket
class TestTicket(unittest.TestCase):
def setUp(self):
server = Mock()
self.ticket = Ticket(server)
def testSearchWithAllParams(self):
self.ticket.search(summary='test_summary', owner='someowner', status='new')
self.ticket.api.query.assert_called_with('max=0&summary~=test_summary&owner=someowner&status=new')
class TestUpdateTicket(unittest.TestCase):
ticket_id = 1
def setUp(self):
server = Mock()
timestamp = datetime.datetime.now()
server.ticket.get.return_value = [self.ticket_id,
timestamp,
timestamp,
{'_ts': timestamp,
'action': 'leave'}]
server.ticket.update.return_value = [self.ticket_id,
timestamp,
timestamp,
{'_ts': timestamp,
'action': 'leave'}]
self.ticket = Ticket(server)
def testComment(self):
self.ticket.comment(self.ticket_id, "some comment")
self.ticket.api.get.assert_called_with(1)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from mock import Mock
import sys
import os
from pytrac import Ticket
class TestTicket(unittest.TestCase):
def setUp(self):
server = Mock()
self.ticket = Ticket(server)
def testSearchWithAllParams(self):
self.ticket.search(summary='test_summary', owner='someowner', status='new')
self.ticket.api.query.assert_called_with('max=0&summary~=test_summary&owner=someowner&status=new')
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for comment api<commit_after>
|
import unittest
from mock import Mock
import sys
import os
import datetime
from pytrac import Ticket
class TestTicket(unittest.TestCase):
def setUp(self):
server = Mock()
self.ticket = Ticket(server)
def testSearchWithAllParams(self):
self.ticket.search(summary='test_summary', owner='someowner', status='new')
self.ticket.api.query.assert_called_with('max=0&summary~=test_summary&owner=someowner&status=new')
class TestUpdateTicket(unittest.TestCase):
ticket_id = 1
def setUp(self):
server = Mock()
timestamp = datetime.datetime.now()
server.ticket.get.return_value = [self.ticket_id,
timestamp,
timestamp,
{'_ts': timestamp,
'action': 'leave'}]
server.ticket.update.return_value = [self.ticket_id,
timestamp,
timestamp,
{'_ts': timestamp,
'action': 'leave'}]
self.ticket = Ticket(server)
def testComment(self):
self.ticket.comment(self.ticket_id, "some comment")
self.ticket.api.get.assert_called_with(1)
if __name__ == '__main__':
unittest.main()
|
import unittest
from mock import Mock
import sys
import os
from pytrac import Ticket
class TestTicket(unittest.TestCase):
def setUp(self):
server = Mock()
self.ticket = Ticket(server)
def testSearchWithAllParams(self):
self.ticket.search(summary='test_summary', owner='someowner', status='new')
self.ticket.api.query.assert_called_with('max=0&summary~=test_summary&owner=someowner&status=new')
if __name__ == '__main__':
unittest.main()
Add test for comment apiimport unittest
from mock import Mock
import sys
import os
import datetime
from pytrac import Ticket
class TestTicket(unittest.TestCase):
def setUp(self):
server = Mock()
self.ticket = Ticket(server)
def testSearchWithAllParams(self):
self.ticket.search(summary='test_summary', owner='someowner', status='new')
self.ticket.api.query.assert_called_with('max=0&summary~=test_summary&owner=someowner&status=new')
class TestUpdateTicket(unittest.TestCase):
ticket_id = 1
def setUp(self):
server = Mock()
timestamp = datetime.datetime.now()
server.ticket.get.return_value = [self.ticket_id,
timestamp,
timestamp,
{'_ts': timestamp,
'action': 'leave'}]
server.ticket.update.return_value = [self.ticket_id,
timestamp,
timestamp,
{'_ts': timestamp,
'action': 'leave'}]
self.ticket = Ticket(server)
def testComment(self):
self.ticket.comment(self.ticket_id, "some comment")
self.ticket.api.get.assert_called_with(1)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from mock import Mock
import sys
import os
from pytrac import Ticket
class TestTicket(unittest.TestCase):
def setUp(self):
server = Mock()
self.ticket = Ticket(server)
def testSearchWithAllParams(self):
self.ticket.search(summary='test_summary', owner='someowner', status='new')
self.ticket.api.query.assert_called_with('max=0&summary~=test_summary&owner=someowner&status=new')
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for comment api<commit_after>import unittest
from mock import Mock
import sys
import os
import datetime
from pytrac import Ticket
class TestTicket(unittest.TestCase):
def setUp(self):
server = Mock()
self.ticket = Ticket(server)
def testSearchWithAllParams(self):
self.ticket.search(summary='test_summary', owner='someowner', status='new')
self.ticket.api.query.assert_called_with('max=0&summary~=test_summary&owner=someowner&status=new')
class TestUpdateTicket(unittest.TestCase):
ticket_id = 1
def setUp(self):
server = Mock()
timestamp = datetime.datetime.now()
server.ticket.get.return_value = [self.ticket_id,
timestamp,
timestamp,
{'_ts': timestamp,
'action': 'leave'}]
server.ticket.update.return_value = [self.ticket_id,
timestamp,
timestamp,
{'_ts': timestamp,
'action': 'leave'}]
self.ticket = Ticket(server)
def testComment(self):
self.ticket.comment(self.ticket_id, "some comment")
self.ticket.api.get.assert_called_with(1)
if __name__ == '__main__':
unittest.main()
|
900ab180a8e255cc46e0583d251c5a71fc27f5d6
|
src/waldur_mastermind/marketplace_rancher/processors.py
|
src/waldur_mastermind/marketplace_rancher/processors.py
|
from waldur_mastermind.marketplace import processors
from waldur_rancher import views as rancher_views
class RancherCreateProcessor(processors.BaseCreateResourceProcessor):
viewset = rancher_views.ClusterViewSet
fields = (
'name',
'description',
'nodes',
'tenant_settings',
'ssh_public_key',
)
class RancherDeleteProcessor(processors.DeleteResourceProcessor):
viewset = rancher_views.ClusterViewSet
|
from waldur_mastermind.marketplace import processors
from waldur_rancher import views as rancher_views
class RancherCreateProcessor(processors.BaseCreateResourceProcessor):
viewset = rancher_views.ClusterViewSet
fields = (
'name',
'description',
'nodes',
'tenant_settings',
'ssh_public_key',
'install_longhorn',
)
class RancherDeleteProcessor(processors.DeleteResourceProcessor):
viewset = rancher_views.ClusterViewSet
|
Add new field to Processor
|
Add new field to Processor
|
Python
|
mit
|
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur
|
from waldur_mastermind.marketplace import processors
from waldur_rancher import views as rancher_views
class RancherCreateProcessor(processors.BaseCreateResourceProcessor):
viewset = rancher_views.ClusterViewSet
fields = (
'name',
'description',
'nodes',
'tenant_settings',
'ssh_public_key',
)
class RancherDeleteProcessor(processors.DeleteResourceProcessor):
viewset = rancher_views.ClusterViewSet
Add new field to Processor
|
from waldur_mastermind.marketplace import processors
from waldur_rancher import views as rancher_views
class RancherCreateProcessor(processors.BaseCreateResourceProcessor):
viewset = rancher_views.ClusterViewSet
fields = (
'name',
'description',
'nodes',
'tenant_settings',
'ssh_public_key',
'install_longhorn',
)
class RancherDeleteProcessor(processors.DeleteResourceProcessor):
viewset = rancher_views.ClusterViewSet
|
<commit_before>from waldur_mastermind.marketplace import processors
from waldur_rancher import views as rancher_views
class RancherCreateProcessor(processors.BaseCreateResourceProcessor):
viewset = rancher_views.ClusterViewSet
fields = (
'name',
'description',
'nodes',
'tenant_settings',
'ssh_public_key',
)
class RancherDeleteProcessor(processors.DeleteResourceProcessor):
viewset = rancher_views.ClusterViewSet
<commit_msg>Add new field to Processor<commit_after>
|
from waldur_mastermind.marketplace import processors
from waldur_rancher import views as rancher_views
class RancherCreateProcessor(processors.BaseCreateResourceProcessor):
viewset = rancher_views.ClusterViewSet
fields = (
'name',
'description',
'nodes',
'tenant_settings',
'ssh_public_key',
'install_longhorn',
)
class RancherDeleteProcessor(processors.DeleteResourceProcessor):
viewset = rancher_views.ClusterViewSet
|
from waldur_mastermind.marketplace import processors
from waldur_rancher import views as rancher_views
class RancherCreateProcessor(processors.BaseCreateResourceProcessor):
viewset = rancher_views.ClusterViewSet
fields = (
'name',
'description',
'nodes',
'tenant_settings',
'ssh_public_key',
)
class RancherDeleteProcessor(processors.DeleteResourceProcessor):
viewset = rancher_views.ClusterViewSet
Add new field to Processorfrom waldur_mastermind.marketplace import processors
from waldur_rancher import views as rancher_views
class RancherCreateProcessor(processors.BaseCreateResourceProcessor):
viewset = rancher_views.ClusterViewSet
fields = (
'name',
'description',
'nodes',
'tenant_settings',
'ssh_public_key',
'install_longhorn',
)
class RancherDeleteProcessor(processors.DeleteResourceProcessor):
viewset = rancher_views.ClusterViewSet
|
<commit_before>from waldur_mastermind.marketplace import processors
from waldur_rancher import views as rancher_views
class RancherCreateProcessor(processors.BaseCreateResourceProcessor):
viewset = rancher_views.ClusterViewSet
fields = (
'name',
'description',
'nodes',
'tenant_settings',
'ssh_public_key',
)
class RancherDeleteProcessor(processors.DeleteResourceProcessor):
viewset = rancher_views.ClusterViewSet
<commit_msg>Add new field to Processor<commit_after>from waldur_mastermind.marketplace import processors
from waldur_rancher import views as rancher_views
class RancherCreateProcessor(processors.BaseCreateResourceProcessor):
viewset = rancher_views.ClusterViewSet
fields = (
'name',
'description',
'nodes',
'tenant_settings',
'ssh_public_key',
'install_longhorn',
)
class RancherDeleteProcessor(processors.DeleteResourceProcessor):
viewset = rancher_views.ClusterViewSet
|
987b45af9ec719ce2ded8615bb7177979e688184
|
tests/functional/test_warning.py
|
tests/functional/test_warning.py
|
import textwrap
def test_environ(script, tmpdir):
"""$PYTHONWARNINGS was added in python2.7"""
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
|
import textwrap
def test_environ(script, tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
# $PYTHONWARNINGS was added in python2.7
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
|
Move docstring to appropriately placed comment
|
Move docstring to appropriately placed comment
|
Python
|
mit
|
pfmoore/pip,pradyunsg/pip,xavfernandez/pip,rouge8/pip,pfmoore/pip,rouge8/pip,pradyunsg/pip,xavfernandez/pip,pypa/pip,sbidoul/pip,sbidoul/pip,xavfernandez/pip,pypa/pip,rouge8/pip
|
import textwrap
def test_environ(script, tmpdir):
"""$PYTHONWARNINGS was added in python2.7"""
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
Move docstring to appropriately placed comment
|
import textwrap
def test_environ(script, tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
# $PYTHONWARNINGS was added in python2.7
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
|
<commit_before>import textwrap
def test_environ(script, tmpdir):
"""$PYTHONWARNINGS was added in python2.7"""
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
<commit_msg>Move docstring to appropriately placed comment<commit_after>
|
import textwrap
def test_environ(script, tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
# $PYTHONWARNINGS was added in python2.7
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
|
import textwrap
def test_environ(script, tmpdir):
"""$PYTHONWARNINGS was added in python2.7"""
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
Move docstring to appropriately placed commentimport textwrap
def test_environ(script, tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
# $PYTHONWARNINGS was added in python2.7
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
|
<commit_before>import textwrap
def test_environ(script, tmpdir):
"""$PYTHONWARNINGS was added in python2.7"""
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
<commit_msg>Move docstring to appropriately placed comment<commit_after>import textwrap
def test_environ(script, tmpdir):
demo = tmpdir.joinpath('warnings_demo.py')
demo.write_text(textwrap.dedent('''
from logging import basicConfig
from pip._internal.utils import deprecation
deprecation.install_warning_logger()
basicConfig()
deprecation.deprecated("deprecated!", replacement=None, gone_in=None)
'''))
result = script.run('python', demo, expect_stderr=True)
expected = 'WARNING:pip._internal.deprecations:DEPRECATION: deprecated!\n'
assert result.stderr == expected
# $PYTHONWARNINGS was added in python2.7
script.environ['PYTHONWARNINGS'] = 'ignore'
result = script.run('python', demo)
assert result.stderr == ''
|
1e8b52b19bb6ff8ce3b635302e032349b0280379
|
tensorflow_zero_out/python/ops/zero_out_ops_test.py
|
tensorflow_zero_out/python/ops/zero_out_ops_test.py
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for zero_out ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_zero_out.python.ops.zero_out_ops import zero_out
except ImportError:
from zero_out_ops import zero_out
class ZeroOutTest(test.TestCase):
def testZeroOut(self):
with self.test_session():
self.assertAllClose(
zero_out([[1, 2], [3, 4]]).eval(), np.array([[1, 0], [0, 0]]))
if __name__ == '__main__':
test.main()
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for zero_out ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_zero_out.python.ops.zero_out_ops import zero_out
except ImportError:
from zero_out_ops import zero_out
class ZeroOutTest(test.TestCase):
def testZeroOut(self):
with self.test_session():
self.assertAllClose(
zero_out([[1, 2], [3, 4]]), np.array([[1, 0], [0, 0]]))
if __name__ == '__main__':
test.main()
|
Fix zero_out test for 2.0
|
Fix zero_out test for 2.0
|
Python
|
apache-2.0
|
tensorflow/custom-op,tensorflow/custom-op,tensorflow/custom-op
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for zero_out ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_zero_out.python.ops.zero_out_ops import zero_out
except ImportError:
from zero_out_ops import zero_out
class ZeroOutTest(test.TestCase):
def testZeroOut(self):
with self.test_session():
self.assertAllClose(
zero_out([[1, 2], [3, 4]]).eval(), np.array([[1, 0], [0, 0]]))
if __name__ == '__main__':
test.main()
Fix zero_out test for 2.0
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for zero_out ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_zero_out.python.ops.zero_out_ops import zero_out
except ImportError:
from zero_out_ops import zero_out
class ZeroOutTest(test.TestCase):
def testZeroOut(self):
with self.test_session():
self.assertAllClose(
zero_out([[1, 2], [3, 4]]), np.array([[1, 0], [0, 0]]))
if __name__ == '__main__':
test.main()
|
<commit_before># Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for zero_out ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_zero_out.python.ops.zero_out_ops import zero_out
except ImportError:
from zero_out_ops import zero_out
class ZeroOutTest(test.TestCase):
def testZeroOut(self):
with self.test_session():
self.assertAllClose(
zero_out([[1, 2], [3, 4]]).eval(), np.array([[1, 0], [0, 0]]))
if __name__ == '__main__':
test.main()
<commit_msg>Fix zero_out test for 2.0<commit_after>
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for zero_out ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_zero_out.python.ops.zero_out_ops import zero_out
except ImportError:
from zero_out_ops import zero_out
class ZeroOutTest(test.TestCase):
def testZeroOut(self):
with self.test_session():
self.assertAllClose(
zero_out([[1, 2], [3, 4]]), np.array([[1, 0], [0, 0]]))
if __name__ == '__main__':
test.main()
|
# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for zero_out ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_zero_out.python.ops.zero_out_ops import zero_out
except ImportError:
from zero_out_ops import zero_out
class ZeroOutTest(test.TestCase):
def testZeroOut(self):
with self.test_session():
self.assertAllClose(
zero_out([[1, 2], [3, 4]]).eval(), np.array([[1, 0], [0, 0]]))
if __name__ == '__main__':
test.main()
Fix zero_out test for 2.0# Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for zero_out ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_zero_out.python.ops.zero_out_ops import zero_out
except ImportError:
from zero_out_ops import zero_out
class ZeroOutTest(test.TestCase):
def testZeroOut(self):
with self.test_session():
self.assertAllClose(
zero_out([[1, 2], [3, 4]]), np.array([[1, 0], [0, 0]]))
if __name__ == '__main__':
test.main()
|
<commit_before># Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for zero_out ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_zero_out.python.ops.zero_out_ops import zero_out
except ImportError:
from zero_out_ops import zero_out
class ZeroOutTest(test.TestCase):
def testZeroOut(self):
with self.test_session():
self.assertAllClose(
zero_out([[1, 2], [3, 4]]).eval(), np.array([[1, 0], [0, 0]]))
if __name__ == '__main__':
test.main()
<commit_msg>Fix zero_out test for 2.0<commit_after># Copyright 2018 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for zero_out ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.platform import test
try:
from tensorflow_zero_out.python.ops.zero_out_ops import zero_out
except ImportError:
from zero_out_ops import zero_out
class ZeroOutTest(test.TestCase):
def testZeroOut(self):
with self.test_session():
self.assertAllClose(
zero_out([[1, 2], [3, 4]]), np.array([[1, 0], [0, 0]]))
if __name__ == '__main__':
test.main()
|
41f244171011a1bbb4a2a77e779979ba8cc9ecb5
|
zeus/api/resources/auth_index.py
|
zeus/api/resources/auth_index.py
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get('/users/me')
except ApiError as exc:
if exc.code == 401:
return {
'isAuthenticated': False,
}
identity_list = list(Identity.query.filter(
Identity.user_id == user_response.data['id'],
))
email_list = list(Email.query.filter(
Email.user_id == user_response.data['id'],
))
return {
'isAuthenticated': True,
'user': json.loads(user_response.data),
'emails': emails_schema.dump(email_list).data,
'identities': identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {
'isAuthenticated': False,
'user': None,
}
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get('/users/me')
except ApiError as exc:
if exc.code == 401:
return {
'isAuthenticated': False,
}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(
Identity.user_id == user['id'],
))
email_list = list(Email.query.filter(
Email.user_id == user['id'],
))
return {
'isAuthenticated': True,
'user': user,
'emails': emails_schema.dump(email_list).data,
'identities': identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {
'isAuthenticated': False,
'user': None,
}
|
Fix auth API usage (this is why we wait for CI)
|
Fix auth API usage (this is why we wait for CI)
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get('/users/me')
except ApiError as exc:
if exc.code == 401:
return {
'isAuthenticated': False,
}
identity_list = list(Identity.query.filter(
Identity.user_id == user_response.data['id'],
))
email_list = list(Email.query.filter(
Email.user_id == user_response.data['id'],
))
return {
'isAuthenticated': True,
'user': json.loads(user_response.data),
'emails': emails_schema.dump(email_list).data,
'identities': identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {
'isAuthenticated': False,
'user': None,
}
Fix auth API usage (this is why we wait for CI)
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get('/users/me')
except ApiError as exc:
if exc.code == 401:
return {
'isAuthenticated': False,
}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(
Identity.user_id == user['id'],
))
email_list = list(Email.query.filter(
Email.user_id == user['id'],
))
return {
'isAuthenticated': True,
'user': user,
'emails': emails_schema.dump(email_list).data,
'identities': identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {
'isAuthenticated': False,
'user': None,
}
|
<commit_before>import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get('/users/me')
except ApiError as exc:
if exc.code == 401:
return {
'isAuthenticated': False,
}
identity_list = list(Identity.query.filter(
Identity.user_id == user_response.data['id'],
))
email_list = list(Email.query.filter(
Email.user_id == user_response.data['id'],
))
return {
'isAuthenticated': True,
'user': json.loads(user_response.data),
'emails': emails_schema.dump(email_list).data,
'identities': identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {
'isAuthenticated': False,
'user': None,
}
<commit_msg>Fix auth API usage (this is why we wait for CI)<commit_after>
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get('/users/me')
except ApiError as exc:
if exc.code == 401:
return {
'isAuthenticated': False,
}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(
Identity.user_id == user['id'],
))
email_list = list(Email.query.filter(
Email.user_id == user['id'],
))
return {
'isAuthenticated': True,
'user': user,
'emails': emails_schema.dump(email_list).data,
'identities': identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {
'isAuthenticated': False,
'user': None,
}
|
import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get('/users/me')
except ApiError as exc:
if exc.code == 401:
return {
'isAuthenticated': False,
}
identity_list = list(Identity.query.filter(
Identity.user_id == user_response.data['id'],
))
email_list = list(Email.query.filter(
Email.user_id == user_response.data['id'],
))
return {
'isAuthenticated': True,
'user': json.loads(user_response.data),
'emails': emails_schema.dump(email_list).data,
'identities': identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {
'isAuthenticated': False,
'user': None,
}
Fix auth API usage (this is why we wait for CI)import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get('/users/me')
except ApiError as exc:
if exc.code == 401:
return {
'isAuthenticated': False,
}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(
Identity.user_id == user['id'],
))
email_list = list(Email.query.filter(
Email.user_id == user['id'],
))
return {
'isAuthenticated': True,
'user': user,
'emails': emails_schema.dump(email_list).data,
'identities': identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {
'isAuthenticated': False,
'user': None,
}
|
<commit_before>import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get('/users/me')
except ApiError as exc:
if exc.code == 401:
return {
'isAuthenticated': False,
}
identity_list = list(Identity.query.filter(
Identity.user_id == user_response.data['id'],
))
email_list = list(Email.query.filter(
Email.user_id == user_response.data['id'],
))
return {
'isAuthenticated': True,
'user': json.loads(user_response.data),
'emails': emails_schema.dump(email_list).data,
'identities': identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {
'isAuthenticated': False,
'user': None,
}
<commit_msg>Fix auth API usage (this is why we wait for CI)<commit_after>import json
from zeus import auth
from zeus.api import client
from zeus.exceptions import ApiError
from zeus.models import Email, Identity
from .base import Resource
from ..schemas import EmailSchema, IdentitySchema, UserSchema
emails_schema = EmailSchema(many=True, strict=True)
identities_schema = IdentitySchema(many=True, strict=True)
user_schema = UserSchema(strict=True)
class AuthIndexResource(Resource):
auth_required = False
def get(self):
"""
Return information on the currently authenticated user.
"""
try:
user_response = client.get('/users/me')
except ApiError as exc:
if exc.code == 401:
return {
'isAuthenticated': False,
}
user = json.loads(user_response.data)
identity_list = list(Identity.query.filter(
Identity.user_id == user['id'],
))
email_list = list(Email.query.filter(
Email.user_id == user['id'],
))
return {
'isAuthenticated': True,
'user': user,
'emails': emails_schema.dump(email_list).data,
'identities': identities_schema.dump(identity_list).data,
}
def delete(self):
"""
Logout.
"""
auth.logout()
return {
'isAuthenticated': False,
'user': None,
}
|
218595e30388d88b649118bb20888c980ce67139
|
urwid/tests/util.py
|
urwid/tests/util.py
|
import urwid
class SelectableText(urwid.Text):
def selectable(self):
return 1
def keypress(self, size, key):
return key
|
import urwid
class SelectableText(urwid.Text):
def selectable(self):
return True
def keypress(self, size, key):
return key
|
Use True instead of 1
|
Use True instead of 1
|
Python
|
lgpl-2.1
|
wardi/urwid,wardi/urwid,inducer/urwid,inducer/urwid,wardi/urwid,urwid/urwid,urwid/urwid,urwid/urwid,inducer/urwid
|
import urwid
class SelectableText(urwid.Text):
def selectable(self):
return 1
def keypress(self, size, key):
return key
Use True instead of 1
|
import urwid
class SelectableText(urwid.Text):
def selectable(self):
return True
def keypress(self, size, key):
return key
|
<commit_before>import urwid
class SelectableText(urwid.Text):
def selectable(self):
return 1
def keypress(self, size, key):
return key
<commit_msg>Use True instead of 1<commit_after>
|
import urwid
class SelectableText(urwid.Text):
def selectable(self):
return True
def keypress(self, size, key):
return key
|
import urwid
class SelectableText(urwid.Text):
def selectable(self):
return 1
def keypress(self, size, key):
return key
Use True instead of 1import urwid
class SelectableText(urwid.Text):
def selectable(self):
return True
def keypress(self, size, key):
return key
|
<commit_before>import urwid
class SelectableText(urwid.Text):
def selectable(self):
return 1
def keypress(self, size, key):
return key
<commit_msg>Use True instead of 1<commit_after>import urwid
class SelectableText(urwid.Text):
def selectable(self):
return True
def keypress(self, size, key):
return key
|
de9c5235d379fcebbfc801fb23c3b9aa2f1fe4e8
|
benchmark/datasets/musicbrainz/extract-random-queries.py
|
benchmark/datasets/musicbrainz/extract-random-queries.py
|
#!/usr/bin/env python
"""
Script to extract and then generate random queries for fuzzy searching.
Usage:
./extract-random-queries.py <infile> <outfile>
"""
import os
from random import choice, randint, random
import string
from subprocess import call
import sys
from tempfile import mkstemp
__author__ = "Uwe L. Korn"
__license__ = "MIT"
input_file = sys.argv[1]
output_file = sys.argv[2]
# Randomly select 1000 lines from the input file and store them temporarily.
temp_f, temp_file = mkstemp()
call(['shuf', '-n', '1000', input_file, '-o', temp_file])
# Modifiy these queries so that they have a non-zero edit distance.
with open(temp_file, 'r') as f:
with open(output_file, 'w') as out:
for line in f.readlines():
if random() > 0.75:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
if random() > 0.25:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
out.write(line)
# Remove the temporary file again.
os.unlink(temp_file)
|
#!/usr/bin/env python
"""
Script to extract and then generate random queries for fuzzy searching.
Usage:
./extract-random-queries.py <infile> <outfile>
"""
import codecs
import os
from random import choice, randint, random
import string
from subprocess import call
import sys
from tempfile import mkstemp
__author__ = "Uwe L. Korn"
__license__ = "MIT"
input_file = sys.argv[1]
output_file = sys.argv[2]
# Randomly select 1000 lines from the input file and store them temporarily.
temp_f, temp_file = mkstemp()
call(['shuf', '-n', '1000', input_file, '-o', temp_file])
# Modifiy these queries so that they have a non-zero edit distance.
with codecs.open(temp_file, 'r', 'utf-8') as f:
with codecs.open(output_file, 'w', 'utf-8') as out:
for line in f.readlines():
if random() > 0.75:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
if random() > 0.25:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
out.write(line)
# Remove the temporary file again.
os.unlink(temp_file)
|
Fix query extractor UTF-8 handling
|
Fix query extractor UTF-8 handling
|
Python
|
mit
|
xhochy/libfuzzymatch,xhochy/libfuzzymatch
|
#!/usr/bin/env python
"""
Script to extract and then generate random queries for fuzzy searching.
Usage:
./extract-random-queries.py <infile> <outfile>
"""
import os
from random import choice, randint, random
import string
from subprocess import call
import sys
from tempfile import mkstemp
__author__ = "Uwe L. Korn"
__license__ = "MIT"
input_file = sys.argv[1]
output_file = sys.argv[2]
# Randomly select 1000 lines from the input file and store them temporarily.
temp_f, temp_file = mkstemp()
call(['shuf', '-n', '1000', input_file, '-o', temp_file])
# Modifiy these queries so that they have a non-zero edit distance.
with open(temp_file, 'r') as f:
with open(output_file, 'w') as out:
for line in f.readlines():
if random() > 0.75:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
if random() > 0.25:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
out.write(line)
# Remove the temporary file again.
os.unlink(temp_file)
Fix query extractor UTF-8 handling
|
#!/usr/bin/env python
"""
Script to extract and then generate random queries for fuzzy searching.
Usage:
./extract-random-queries.py <infile> <outfile>
"""
import codecs
import os
from random import choice, randint, random
import string
from subprocess import call
import sys
from tempfile import mkstemp
__author__ = "Uwe L. Korn"
__license__ = "MIT"
input_file = sys.argv[1]
output_file = sys.argv[2]
# Randomly select 1000 lines from the input file and store them temporarily.
temp_f, temp_file = mkstemp()
call(['shuf', '-n', '1000', input_file, '-o', temp_file])
# Modifiy these queries so that they have a non-zero edit distance.
with codecs.open(temp_file, 'r', 'utf-8') as f:
with codecs.open(output_file, 'w', 'utf-8') as out:
for line in f.readlines():
if random() > 0.75:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
if random() > 0.25:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
out.write(line)
# Remove the temporary file again.
os.unlink(temp_file)
|
<commit_before>#!/usr/bin/env python
"""
Script to extract and then generate random queries for fuzzy searching.
Usage:
./extract-random-queries.py <infile> <outfile>
"""
import os
from random import choice, randint, random
import string
from subprocess import call
import sys
from tempfile import mkstemp
__author__ = "Uwe L. Korn"
__license__ = "MIT"
input_file = sys.argv[1]
output_file = sys.argv[2]
# Randomly select 1000 lines from the input file and store them temporarily.
temp_f, temp_file = mkstemp()
call(['shuf', '-n', '1000', input_file, '-o', temp_file])
# Modifiy these queries so that they have a non-zero edit distance.
with open(temp_file, 'r') as f:
with open(output_file, 'w') as out:
for line in f.readlines():
if random() > 0.75:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
if random() > 0.25:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
out.write(line)
# Remove the temporary file again.
os.unlink(temp_file)
<commit_msg>Fix query extractor UTF-8 handling<commit_after>
|
#!/usr/bin/env python
"""
Script to extract and then generate random queries for fuzzy searching.
Usage:
./extract-random-queries.py <infile> <outfile>
"""
import codecs
import os
from random import choice, randint, random
import string
from subprocess import call
import sys
from tempfile import mkstemp
__author__ = "Uwe L. Korn"
__license__ = "MIT"
input_file = sys.argv[1]
output_file = sys.argv[2]
# Randomly select 1000 lines from the input file and store them temporarily.
temp_f, temp_file = mkstemp()
call(['shuf', '-n', '1000', input_file, '-o', temp_file])
# Modifiy these queries so that they have a non-zero edit distance.
with codecs.open(temp_file, 'r', 'utf-8') as f:
with codecs.open(output_file, 'w', 'utf-8') as out:
for line in f.readlines():
if random() > 0.75:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
if random() > 0.25:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
out.write(line)
# Remove the temporary file again.
os.unlink(temp_file)
|
#!/usr/bin/env python
"""
Script to extract and then generate random queries for fuzzy searching.
Usage:
./extract-random-queries.py <infile> <outfile>
"""
import os
from random import choice, randint, random
import string
from subprocess import call
import sys
from tempfile import mkstemp
__author__ = "Uwe L. Korn"
__license__ = "MIT"
input_file = sys.argv[1]
output_file = sys.argv[2]
# Randomly select 1000 lines from the input file and store them temporarily.
temp_f, temp_file = mkstemp()
call(['shuf', '-n', '1000', input_file, '-o', temp_file])
# Modifiy these queries so that they have a non-zero edit distance.
with open(temp_file, 'r') as f:
with open(output_file, 'w') as out:
for line in f.readlines():
if random() > 0.75:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
if random() > 0.25:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
out.write(line)
# Remove the temporary file again.
os.unlink(temp_file)
Fix query extractor UTF-8 handling#!/usr/bin/env python
"""
Script to extract and then generate random queries for fuzzy searching.
Usage:
./extract-random-queries.py <infile> <outfile>
"""
import codecs
import os
from random import choice, randint, random
import string
from subprocess import call
import sys
from tempfile import mkstemp
__author__ = "Uwe L. Korn"
__license__ = "MIT"
input_file = sys.argv[1]
output_file = sys.argv[2]
# Randomly select 1000 lines from the input file and store them temporarily.
temp_f, temp_file = mkstemp()
call(['shuf', '-n', '1000', input_file, '-o', temp_file])
# Modifiy these queries so that they have a non-zero edit distance.
with codecs.open(temp_file, 'r', 'utf-8') as f:
with codecs.open(output_file, 'w', 'utf-8') as out:
for line in f.readlines():
if random() > 0.75:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
if random() > 0.25:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
out.write(line)
# Remove the temporary file again.
os.unlink(temp_file)
|
<commit_before>#!/usr/bin/env python
"""
Script to extract and then generate random queries for fuzzy searching.
Usage:
./extract-random-queries.py <infile> <outfile>
"""
import os
from random import choice, randint, random
import string
from subprocess import call
import sys
from tempfile import mkstemp
__author__ = "Uwe L. Korn"
__license__ = "MIT"
input_file = sys.argv[1]
output_file = sys.argv[2]
# Randomly select 1000 lines from the input file and store them temporarily.
temp_f, temp_file = mkstemp()
call(['shuf', '-n', '1000', input_file, '-o', temp_file])
# Modifiy these queries so that they have a non-zero edit distance.
with open(temp_file, 'r') as f:
with open(output_file, 'w') as out:
for line in f.readlines():
if random() > 0.75:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
if random() > 0.25:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
out.write(line)
# Remove the temporary file again.
os.unlink(temp_file)
<commit_msg>Fix query extractor UTF-8 handling<commit_after>#!/usr/bin/env python
"""
Script to extract and then generate random queries for fuzzy searching.
Usage:
./extract-random-queries.py <infile> <outfile>
"""
import codecs
import os
from random import choice, randint, random
import string
from subprocess import call
import sys
from tempfile import mkstemp
__author__ = "Uwe L. Korn"
__license__ = "MIT"
input_file = sys.argv[1]
output_file = sys.argv[2]
# Randomly select 1000 lines from the input file and store them temporarily.
temp_f, temp_file = mkstemp()
call(['shuf', '-n', '1000', input_file, '-o', temp_file])
# Modifiy these queries so that they have a non-zero edit distance.
with codecs.open(temp_file, 'r', 'utf-8') as f:
with codecs.open(output_file, 'w', 'utf-8') as out:
for line in f.readlines():
if random() > 0.75:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
if random() > 0.25:
pos = randint(0, len(line) - 2)
line = line[0:pos] + choice(string.ascii_lowercase) + line[pos + 1:]
out.write(line)
# Remove the temporary file again.
os.unlink(temp_file)
|
7d50ca9b29a71a9cda2a5b78a0cb392108b217d5
|
roche/scripts/xml-server-load.py
|
roche/scripts/xml-server-load.py
|
# coding=utf-8
#
# Must be called in roche root dir
#
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
|
# coding=utf-8
#
# Must be called in roche root dir
#
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
#
# http://username:password@54.220.97.75:8080/exist
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
|
Add comment for full url with non guest user
|
Add comment for full url with non guest user
|
Python
|
mit
|
beijingren/roche-website,beijingren/roche-website,beijingren/roche-website,beijingren/roche-website
|
# coding=utf-8
#
# Must be called in roche root dir
#
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
Add comment for full url with non guest user
|
# coding=utf-8
#
# Must be called in roche root dir
#
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
#
# http://username:password@54.220.97.75:8080/exist
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
|
<commit_before># coding=utf-8
#
# Must be called in roche root dir
#
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
<commit_msg>Add comment for full url with non guest user<commit_after>
|
# coding=utf-8
#
# Must be called in roche root dir
#
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
#
# http://username:password@54.220.97.75:8080/exist
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
|
# coding=utf-8
#
# Must be called in roche root dir
#
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
Add comment for full url with non guest user# coding=utf-8
#
# Must be called in roche root dir
#
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
#
# http://username:password@54.220.97.75:8080/exist
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
|
<commit_before># coding=utf-8
#
# Must be called in roche root dir
#
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
<commit_msg>Add comment for full url with non guest user<commit_after># coding=utf-8
#
# Must be called in roche root dir
#
import os
from os import walk
from eulexistdb.db import ExistDB
#
# Timeout higher?
#
#
# http://username:password@54.220.97.75:8080/exist
#
xmldb = ExistDB('http://54.220.97.75:8080/exist')
xmldb.createCollection('docker', True)
xmldb.createCollection('docker/texts', True)
os.chdir('../dublin-store')
for (dirpath, dirnames, filenames) in walk('浙江大學圖書館'):
xmldb.createCollection('docker/texts' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
print "--" + dirpath + '/' + filename
xmldb.load(f, 'docker/texts' + '/' + dirpath + '/' + filename, True)
#
# Load resources
#
for (dirpath, dirnames, filenames) in walk('resources'):
xmldb.createCollection('docker' + '/' + dirpath, True)
if filenames:
for filename in filenames:
with open(dirpath + '/' + filename) as f:
xmldb.load(f, 'docker' + '/' + dirpath + '/' + filename, True)
|
12352c1f7c9751727b8bd98ece576f9d690b520e
|
corehq/apps/export/migrations/0008_auto_20190906_2008.py
|
corehq/apps/export/migrations/0008_auto_20190906_2008.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-06 20:08
from __future__ import unicode_literals
from django.db import migrations
from corehq.apps.es.aggregations import (
AggregationTerm,
NestedTermAggregationsHelper,
)
from corehq.apps.es.ledgers import LedgerES
from corehq.apps.export.models.new import LedgerSectionEntry
from corehq.util.django_migrations import skip_on_fresh_install
@skip_on_fresh_install
def initialize_ledger_combinations(apps, schema_editor):
terms = [
AggregationTerm('domain', 'domain'),
AggregationTerm('section_id', 'section_id'),
AggregationTerm('entry_id', 'entry_id'),
]
combos = [
a for a in NestedTermAggregationsHelper(base_query=LedgerES(), terms=terms).get_data()
]
for combo in combos:
LedgerSectionEntry.objects.get_or_create(
domain=combo.domain,
section_id=combo.section_id,
entry_id=combo.entry_id,
)
class Migration(migrations.Migration):
dependencies = [
('export', '0007_auto_20190906_0149'),
]
operations = [
migrations.RunPython(initialize_ledger_combinations, elidable=True),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-06 20:08
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
"""
This migration used to contain some initialization for LedgerSectionEntry.
At the time it was run, this model was only used by exports and only on Supply projects
"""
dependencies = [
('export', '0007_auto_20190906_0149'),
]
operations = [
]
|
Remove data migration from migration file
|
Remove data migration from migration file
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-06 20:08
from __future__ import unicode_literals
from django.db import migrations
from corehq.apps.es.aggregations import (
AggregationTerm,
NestedTermAggregationsHelper,
)
from corehq.apps.es.ledgers import LedgerES
from corehq.apps.export.models.new import LedgerSectionEntry
from corehq.util.django_migrations import skip_on_fresh_install
@skip_on_fresh_install
def initialize_ledger_combinations(apps, schema_editor):
terms = [
AggregationTerm('domain', 'domain'),
AggregationTerm('section_id', 'section_id'),
AggregationTerm('entry_id', 'entry_id'),
]
combos = [
a for a in NestedTermAggregationsHelper(base_query=LedgerES(), terms=terms).get_data()
]
for combo in combos:
LedgerSectionEntry.objects.get_or_create(
domain=combo.domain,
section_id=combo.section_id,
entry_id=combo.entry_id,
)
class Migration(migrations.Migration):
dependencies = [
('export', '0007_auto_20190906_0149'),
]
operations = [
migrations.RunPython(initialize_ledger_combinations, elidable=True),
]
Remove data migration from migration file
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-06 20:08
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
"""
This migration used to contain some initialization for LedgerSectionEntry.
At the time it was run, this model was only used by exports and only on Supply projects
"""
dependencies = [
('export', '0007_auto_20190906_0149'),
]
operations = [
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-06 20:08
from __future__ import unicode_literals
from django.db import migrations
from corehq.apps.es.aggregations import (
AggregationTerm,
NestedTermAggregationsHelper,
)
from corehq.apps.es.ledgers import LedgerES
from corehq.apps.export.models.new import LedgerSectionEntry
from corehq.util.django_migrations import skip_on_fresh_install
@skip_on_fresh_install
def initialize_ledger_combinations(apps, schema_editor):
terms = [
AggregationTerm('domain', 'domain'),
AggregationTerm('section_id', 'section_id'),
AggregationTerm('entry_id', 'entry_id'),
]
combos = [
a for a in NestedTermAggregationsHelper(base_query=LedgerES(), terms=terms).get_data()
]
for combo in combos:
LedgerSectionEntry.objects.get_or_create(
domain=combo.domain,
section_id=combo.section_id,
entry_id=combo.entry_id,
)
class Migration(migrations.Migration):
dependencies = [
('export', '0007_auto_20190906_0149'),
]
operations = [
migrations.RunPython(initialize_ledger_combinations, elidable=True),
]
<commit_msg>Remove data migration from migration file<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-06 20:08
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
"""
This migration used to contain some initialization for LedgerSectionEntry.
At the time it was run, this model was only used by exports and only on Supply projects
"""
dependencies = [
('export', '0007_auto_20190906_0149'),
]
operations = [
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-06 20:08
from __future__ import unicode_literals
from django.db import migrations
from corehq.apps.es.aggregations import (
AggregationTerm,
NestedTermAggregationsHelper,
)
from corehq.apps.es.ledgers import LedgerES
from corehq.apps.export.models.new import LedgerSectionEntry
from corehq.util.django_migrations import skip_on_fresh_install
@skip_on_fresh_install
def initialize_ledger_combinations(apps, schema_editor):
terms = [
AggregationTerm('domain', 'domain'),
AggregationTerm('section_id', 'section_id'),
AggregationTerm('entry_id', 'entry_id'),
]
combos = [
a for a in NestedTermAggregationsHelper(base_query=LedgerES(), terms=terms).get_data()
]
for combo in combos:
LedgerSectionEntry.objects.get_or_create(
domain=combo.domain,
section_id=combo.section_id,
entry_id=combo.entry_id,
)
class Migration(migrations.Migration):
dependencies = [
('export', '0007_auto_20190906_0149'),
]
operations = [
migrations.RunPython(initialize_ledger_combinations, elidable=True),
]
Remove data migration from migration file# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-06 20:08
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
"""
This migration used to contain some initialization for LedgerSectionEntry.
At the time it was run, this model was only used by exports and only on Supply projects
"""
dependencies = [
('export', '0007_auto_20190906_0149'),
]
operations = [
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-06 20:08
from __future__ import unicode_literals
from django.db import migrations
from corehq.apps.es.aggregations import (
AggregationTerm,
NestedTermAggregationsHelper,
)
from corehq.apps.es.ledgers import LedgerES
from corehq.apps.export.models.new import LedgerSectionEntry
from corehq.util.django_migrations import skip_on_fresh_install
@skip_on_fresh_install
def initialize_ledger_combinations(apps, schema_editor):
terms = [
AggregationTerm('domain', 'domain'),
AggregationTerm('section_id', 'section_id'),
AggregationTerm('entry_id', 'entry_id'),
]
combos = [
a for a in NestedTermAggregationsHelper(base_query=LedgerES(), terms=terms).get_data()
]
for combo in combos:
LedgerSectionEntry.objects.get_or_create(
domain=combo.domain,
section_id=combo.section_id,
entry_id=combo.entry_id,
)
class Migration(migrations.Migration):
dependencies = [
('export', '0007_auto_20190906_0149'),
]
operations = [
migrations.RunPython(initialize_ledger_combinations, elidable=True),
]
<commit_msg>Remove data migration from migration file<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-09-06 20:08
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
"""
This migration used to contain some initialization for LedgerSectionEntry.
At the time it was run, this model was only used by exports and only on Supply projects
"""
dependencies = [
('export', '0007_auto_20190906_0149'),
]
operations = [
]
|
281e686d9599b06b718f2bf653921d51750fc00f
|
purchase_supplier_minimum_order/models/__init__.py
|
purchase_supplier_minimum_order/models/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company,
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Add mode switch to res.company
|
Add mode switch to res.company
|
Python
|
agpl-3.0
|
OpusVL/odoo-purchase-min-order
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Add mode switch to res.company
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company,
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>Add mode switch to res.company<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company,
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Add mode switch to res.company# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company,
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>Add mode switch to res.company<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_company,
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
41eff3cfcbf6e7615353e0e5126b729f956a89aa
|
pajbot/migration_revisions/db/0002_create_index_on_user_points.py
|
pajbot/migration_revisions/db/0002_create_index_on_user_points.py
|
def up(cursor, context):
cursor.execute("CREATE INDEX ON \"user\"(points)")
|
def up(cursor, context):
# the index on user(points) caches/indexes the table, ordered by points
# so queries like the top 30 point farmers can skip sorting the entire
# user table by points, and just instead use the sorting given by the
# user(points) index.
# e.g. compare (before and after creating the index):
# without an index on points:
#
# pajbot=> EXPLAIN ANALYZE SELECT * FROM "user" ORDER BY points LIMIT 10;
# QUERY PLAN
# -----------------------------------------------------------------------------------------------------------------------
# Limit (cost=1610.93..1610.96 rows=10 width=41) (actual time=12.005..12.009 rows=10 loops=1)
# -> Sort (cost=1610.93..1705.84 rows=37961 width=41) (actual time=12.003..12.004 rows=10 loops=1)
# Sort Key: points
# Sort Method: top-N heapsort Memory: 27kB
# -> Seq Scan on "user" (cost=0.00..790.61 rows=37961 width=41) (actual time=0.030..7.097 rows=37961 loops=1)
# Planning Time: 0.187 ms
# Execution Time: 12.039 ms
# (7 rows)
# creating the index...
#
# pajbot=> CREATE INDEX ON "user"(points);
# CREATE INDEX
# now with the index!
#
# pajbot=> EXPLAIN ANALYZE SELECT * FROM "user" ORDER BY points LIMIT 10;
# QUERY PLAN
# ---------------------------------------------------------------------------------------------------------------------------------------
# Limit (cost=0.29..0.59 rows=10 width=41) (actual time=0.041..0.050 rows=10 loops=1)
# -> Index Scan using user_points_idx on "user" (cost=0.29..1135.63 rows=37961 width=41) (actual time=0.038..0.046 rows=10 loops=1)
# Planning Time: 0.408 ms
# Execution Time: 0.071 ms
# (4 rows)
# notice the DB no longer sorts the users table, and query execution times have improved dramatically!
cursor.execute('CREATE INDEX ON "user"(points)')
|
Comment on the create index revision
|
Comment on the create index revision
|
Python
|
mit
|
pajlada/tyggbot,pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot
|
def up(cursor, context):
cursor.execute("CREATE INDEX ON \"user\"(points)")
Comment on the create index revision
|
def up(cursor, context):
# the index on user(points) caches/indexes the table, ordered by points
# so queries like the top 30 point farmers can skip sorting the entire
# user table by points, and just instead use the sorting given by the
# user(points) index.
# e.g. compare (before and after creating the index):
# without an index on points:
#
# pajbot=> EXPLAIN ANALYZE SELECT * FROM "user" ORDER BY points LIMIT 10;
# QUERY PLAN
# -----------------------------------------------------------------------------------------------------------------------
# Limit (cost=1610.93..1610.96 rows=10 width=41) (actual time=12.005..12.009 rows=10 loops=1)
# -> Sort (cost=1610.93..1705.84 rows=37961 width=41) (actual time=12.003..12.004 rows=10 loops=1)
# Sort Key: points
# Sort Method: top-N heapsort Memory: 27kB
# -> Seq Scan on "user" (cost=0.00..790.61 rows=37961 width=41) (actual time=0.030..7.097 rows=37961 loops=1)
# Planning Time: 0.187 ms
# Execution Time: 12.039 ms
# (7 rows)
# creating the index...
#
# pajbot=> CREATE INDEX ON "user"(points);
# CREATE INDEX
# now with the index!
#
# pajbot=> EXPLAIN ANALYZE SELECT * FROM "user" ORDER BY points LIMIT 10;
# QUERY PLAN
# ---------------------------------------------------------------------------------------------------------------------------------------
# Limit (cost=0.29..0.59 rows=10 width=41) (actual time=0.041..0.050 rows=10 loops=1)
# -> Index Scan using user_points_idx on "user" (cost=0.29..1135.63 rows=37961 width=41) (actual time=0.038..0.046 rows=10 loops=1)
# Planning Time: 0.408 ms
# Execution Time: 0.071 ms
# (4 rows)
# notice the DB no longer sorts the users table, and query execution times have improved dramatically!
cursor.execute('CREATE INDEX ON "user"(points)')
|
<commit_before>def up(cursor, context):
cursor.execute("CREATE INDEX ON \"user\"(points)")
<commit_msg>Comment on the create index revision<commit_after>
|
def up(cursor, context):
# the index on user(points) caches/indexes the table, ordered by points
# so queries like the top 30 point farmers can skip sorting the entire
# user table by points, and just instead use the sorting given by the
# user(points) index.
# e.g. compare (before and after creating the index):
# without an index on points:
#
# pajbot=> EXPLAIN ANALYZE SELECT * FROM "user" ORDER BY points LIMIT 10;
# QUERY PLAN
# -----------------------------------------------------------------------------------------------------------------------
# Limit (cost=1610.93..1610.96 rows=10 width=41) (actual time=12.005..12.009 rows=10 loops=1)
# -> Sort (cost=1610.93..1705.84 rows=37961 width=41) (actual time=12.003..12.004 rows=10 loops=1)
# Sort Key: points
# Sort Method: top-N heapsort Memory: 27kB
# -> Seq Scan on "user" (cost=0.00..790.61 rows=37961 width=41) (actual time=0.030..7.097 rows=37961 loops=1)
# Planning Time: 0.187 ms
# Execution Time: 12.039 ms
# (7 rows)
# creating the index...
#
# pajbot=> CREATE INDEX ON "user"(points);
# CREATE INDEX
# now with the index!
#
# pajbot=> EXPLAIN ANALYZE SELECT * FROM "user" ORDER BY points LIMIT 10;
# QUERY PLAN
# ---------------------------------------------------------------------------------------------------------------------------------------
# Limit (cost=0.29..0.59 rows=10 width=41) (actual time=0.041..0.050 rows=10 loops=1)
# -> Index Scan using user_points_idx on "user" (cost=0.29..1135.63 rows=37961 width=41) (actual time=0.038..0.046 rows=10 loops=1)
# Planning Time: 0.408 ms
# Execution Time: 0.071 ms
# (4 rows)
# notice the DB no longer sorts the users table, and query execution times have improved dramatically!
cursor.execute('CREATE INDEX ON "user"(points)')
|
def up(cursor, context):
cursor.execute("CREATE INDEX ON \"user\"(points)")
Comment on the create index revisiondef up(cursor, context):
# the index on user(points) caches/indexes the table, ordered by points
# so queries like the top 30 point farmers can skip sorting the entire
# user table by points, and just instead use the sorting given by the
# user(points) index.
# e.g. compare (before and after creating the index):
# without an index on points:
#
# pajbot=> EXPLAIN ANALYZE SELECT * FROM "user" ORDER BY points LIMIT 10;
# QUERY PLAN
# -----------------------------------------------------------------------------------------------------------------------
# Limit (cost=1610.93..1610.96 rows=10 width=41) (actual time=12.005..12.009 rows=10 loops=1)
# -> Sort (cost=1610.93..1705.84 rows=37961 width=41) (actual time=12.003..12.004 rows=10 loops=1)
# Sort Key: points
# Sort Method: top-N heapsort Memory: 27kB
# -> Seq Scan on "user" (cost=0.00..790.61 rows=37961 width=41) (actual time=0.030..7.097 rows=37961 loops=1)
# Planning Time: 0.187 ms
# Execution Time: 12.039 ms
# (7 rows)
# creating the index...
#
# pajbot=> CREATE INDEX ON "user"(points);
# CREATE INDEX
# now with the index!
#
# pajbot=> EXPLAIN ANALYZE SELECT * FROM "user" ORDER BY points LIMIT 10;
# QUERY PLAN
# ---------------------------------------------------------------------------------------------------------------------------------------
# Limit (cost=0.29..0.59 rows=10 width=41) (actual time=0.041..0.050 rows=10 loops=1)
# -> Index Scan using user_points_idx on "user" (cost=0.29..1135.63 rows=37961 width=41) (actual time=0.038..0.046 rows=10 loops=1)
# Planning Time: 0.408 ms
# Execution Time: 0.071 ms
# (4 rows)
# notice the DB no longer sorts the users table, and query execution times have improved dramatically!
cursor.execute('CREATE INDEX ON "user"(points)')
|
<commit_before>def up(cursor, context):
cursor.execute("CREATE INDEX ON \"user\"(points)")
<commit_msg>Comment on the create index revision<commit_after>def up(cursor, context):
# the index on user(points) caches/indexes the table, ordered by points
# so queries like the top 30 point farmers can skip sorting the entire
# user table by points, and just instead use the sorting given by the
# user(points) index.
# e.g. compare (before and after creating the index):
# without an index on points:
#
# pajbot=> EXPLAIN ANALYZE SELECT * FROM "user" ORDER BY points LIMIT 10;
# QUERY PLAN
# -----------------------------------------------------------------------------------------------------------------------
# Limit (cost=1610.93..1610.96 rows=10 width=41) (actual time=12.005..12.009 rows=10 loops=1)
# -> Sort (cost=1610.93..1705.84 rows=37961 width=41) (actual time=12.003..12.004 rows=10 loops=1)
# Sort Key: points
# Sort Method: top-N heapsort Memory: 27kB
# -> Seq Scan on "user" (cost=0.00..790.61 rows=37961 width=41) (actual time=0.030..7.097 rows=37961 loops=1)
# Planning Time: 0.187 ms
# Execution Time: 12.039 ms
# (7 rows)
# creating the index...
#
# pajbot=> CREATE INDEX ON "user"(points);
# CREATE INDEX
# now with the index!
#
# pajbot=> EXPLAIN ANALYZE SELECT * FROM "user" ORDER BY points LIMIT 10;
# QUERY PLAN
# ---------------------------------------------------------------------------------------------------------------------------------------
# Limit (cost=0.29..0.59 rows=10 width=41) (actual time=0.041..0.050 rows=10 loops=1)
# -> Index Scan using user_points_idx on "user" (cost=0.29..1135.63 rows=37961 width=41) (actual time=0.038..0.046 rows=10 loops=1)
# Planning Time: 0.408 ms
# Execution Time: 0.071 ms
# (4 rows)
# notice the DB no longer sorts the users table, and query execution times have improved dramatically!
cursor.execute('CREATE INDEX ON "user"(points)')
|
aa473aed5ef3fdf200623e39ae0526e437a85575
|
pun.py
|
pun.py
|
#!/usr/bin/python
# imports
from pprint import pprint
import urllib2
import socket
import sys
#import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
# read the html content of the random pun page into a string
try:
html_content = urllib2.urlopen('http://www.punoftheday.com/cgi-bin/randompun.pl', timeout = 1).read()
except urllib2.URLError, e:
sys.stderr.write("(url error waiting for pun)\n")
sys.exit(1)
except socket.timeout, e:
sys.stderr.write("(socket timeout waiting for pun)\n")
sys.exit(1)
# create a beautiful soup object out of the raw html (the prettify is probably not necessary)
soup = BeautifulSoup(html_content, "html.parser")
soup.prettify()
# find and print the pun... it's the text in the element: div#main-content div.dropshadow1
pun = soup.find('div', {'id': 'main-content'}).find('div', {'class': 'dropshadow1'}).text
pun = pun.strip()
print pun
try:
client = MongoClient()
db = client['pundb']
collection = db['puns']
punDict = {"full": pun}
collection.update(punDict, punDict, True)
except:
# do nothing on insertion error
pass
|
#!/usr/bin/python
# imports
from pprint import pprint
import urllib2
import socket
import sys
#import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
# read the html content of the random pun page into a string
try:
html_content = urllib2.urlopen('http://www.punoftheday.com/cgi-bin/randompun.pl', timeout = 1).read()
except urllib2.URLError, e:
sys.stderr.write("(url error waiting for pun)\n")
sys.exit(1)
except socket.timeout, e:
sys.stderr.write("(socket timeout waiting for pun)\n")
sys.exit(1)
# create a beautiful soup object out of the raw html (the prettify is probably not necessary)
soup = BeautifulSoup(html_content, "html.parser")
soup.prettify()
# find and print the pun... it's the text in the element: div#main-content div.dropshadow1
pun = soup.find('div', {'id': 'main-content'}).find('div', {'class': 'dropshadow1'}).text
pun = pun.strip()
print pun
#try:
# client = MongoClient()
# db = client['pundb']
# collection = db['puns']
# punDict = {"full": pun}
# collection.update(punDict, punDict, True)
#except:
# # do nothing on insertion error
# pass
|
Disable mongo server for now
|
Disable mongo server for now
|
Python
|
mit
|
andrewmacheret/pun-server,andrewmacheret/pun-server,andrewmacheret/pun-server
|
#!/usr/bin/python
# imports
from pprint import pprint
import urllib2
import socket
import sys
#import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
# read the html content of the random pun page into a string
try:
html_content = urllib2.urlopen('http://www.punoftheday.com/cgi-bin/randompun.pl', timeout = 1).read()
except urllib2.URLError, e:
sys.stderr.write("(url error waiting for pun)\n")
sys.exit(1)
except socket.timeout, e:
sys.stderr.write("(socket timeout waiting for pun)\n")
sys.exit(1)
# create a beautiful soup object out of the raw html (the prettify is probably not necessary)
soup = BeautifulSoup(html_content, "html.parser")
soup.prettify()
# find and print the pun... it's the text in the element: div#main-content div.dropshadow1
pun = soup.find('div', {'id': 'main-content'}).find('div', {'class': 'dropshadow1'}).text
pun = pun.strip()
print pun
try:
client = MongoClient()
db = client['pundb']
collection = db['puns']
punDict = {"full": pun}
collection.update(punDict, punDict, True)
except:
# do nothing on insertion error
pass
Disable mongo server for now
|
#!/usr/bin/python
# imports
from pprint import pprint
import urllib2
import socket
import sys
#import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
# read the html content of the random pun page into a string
try:
html_content = urllib2.urlopen('http://www.punoftheday.com/cgi-bin/randompun.pl', timeout = 1).read()
except urllib2.URLError, e:
sys.stderr.write("(url error waiting for pun)\n")
sys.exit(1)
except socket.timeout, e:
sys.stderr.write("(socket timeout waiting for pun)\n")
sys.exit(1)
# create a beautiful soup object out of the raw html (the prettify is probably not necessary)
soup = BeautifulSoup(html_content, "html.parser")
soup.prettify()
# find and print the pun... it's the text in the element: div#main-content div.dropshadow1
pun = soup.find('div', {'id': 'main-content'}).find('div', {'class': 'dropshadow1'}).text
pun = pun.strip()
print pun
#try:
# client = MongoClient()
# db = client['pundb']
# collection = db['puns']
# punDict = {"full": pun}
# collection.update(punDict, punDict, True)
#except:
# # do nothing on insertion error
# pass
|
<commit_before>#!/usr/bin/python
# imports
from pprint import pprint
import urllib2
import socket
import sys
#import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
# read the html content of the random pun page into a string
try:
html_content = urllib2.urlopen('http://www.punoftheday.com/cgi-bin/randompun.pl', timeout = 1).read()
except urllib2.URLError, e:
sys.stderr.write("(url error waiting for pun)\n")
sys.exit(1)
except socket.timeout, e:
sys.stderr.write("(socket timeout waiting for pun)\n")
sys.exit(1)
# create a beautiful soup object out of the raw html (the prettify is probably not necessary)
soup = BeautifulSoup(html_content, "html.parser")
soup.prettify()
# find and print the pun... it's the text in the element: div#main-content div.dropshadow1
pun = soup.find('div', {'id': 'main-content'}).find('div', {'class': 'dropshadow1'}).text
pun = pun.strip()
print pun
try:
client = MongoClient()
db = client['pundb']
collection = db['puns']
punDict = {"full": pun}
collection.update(punDict, punDict, True)
except:
# do nothing on insertion error
pass
<commit_msg>Disable mongo server for now<commit_after>
|
#!/usr/bin/python
# imports
from pprint import pprint
import urllib2
import socket
import sys
#import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
# read the html content of the random pun page into a string
try:
html_content = urllib2.urlopen('http://www.punoftheday.com/cgi-bin/randompun.pl', timeout = 1).read()
except urllib2.URLError, e:
sys.stderr.write("(url error waiting for pun)\n")
sys.exit(1)
except socket.timeout, e:
sys.stderr.write("(socket timeout waiting for pun)\n")
sys.exit(1)
# create a beautiful soup object out of the raw html (the prettify is probably not necessary)
soup = BeautifulSoup(html_content, "html.parser")
soup.prettify()
# find and print the pun... it's the text in the element: div#main-content div.dropshadow1
pun = soup.find('div', {'id': 'main-content'}).find('div', {'class': 'dropshadow1'}).text
pun = pun.strip()
print pun
#try:
# client = MongoClient()
# db = client['pundb']
# collection = db['puns']
# punDict = {"full": pun}
# collection.update(punDict, punDict, True)
#except:
# # do nothing on insertion error
# pass
|
#!/usr/bin/python
# imports
from pprint import pprint
import urllib2
import socket
import sys
#import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
# read the html content of the random pun page into a string
try:
html_content = urllib2.urlopen('http://www.punoftheday.com/cgi-bin/randompun.pl', timeout = 1).read()
except urllib2.URLError, e:
sys.stderr.write("(url error waiting for pun)\n")
sys.exit(1)
except socket.timeout, e:
sys.stderr.write("(socket timeout waiting for pun)\n")
sys.exit(1)
# create a beautiful soup object out of the raw html (the prettify is probably not necessary)
soup = BeautifulSoup(html_content, "html.parser")
soup.prettify()
# find and print the pun... it's the text in the element: div#main-content div.dropshadow1
pun = soup.find('div', {'id': 'main-content'}).find('div', {'class': 'dropshadow1'}).text
pun = pun.strip()
print pun
try:
client = MongoClient()
db = client['pundb']
collection = db['puns']
punDict = {"full": pun}
collection.update(punDict, punDict, True)
except:
# do nothing on insertion error
pass
Disable mongo server for now#!/usr/bin/python
# imports
from pprint import pprint
import urllib2
import socket
import sys
#import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
# read the html content of the random pun page into a string
try:
html_content = urllib2.urlopen('http://www.punoftheday.com/cgi-bin/randompun.pl', timeout = 1).read()
except urllib2.URLError, e:
sys.stderr.write("(url error waiting for pun)\n")
sys.exit(1)
except socket.timeout, e:
sys.stderr.write("(socket timeout waiting for pun)\n")
sys.exit(1)
# create a beautiful soup object out of the raw html (the prettify is probably not necessary)
soup = BeautifulSoup(html_content, "html.parser")
soup.prettify()
# find and print the pun... it's the text in the element: div#main-content div.dropshadow1
pun = soup.find('div', {'id': 'main-content'}).find('div', {'class': 'dropshadow1'}).text
pun = pun.strip()
print pun
#try:
# client = MongoClient()
# db = client['pundb']
# collection = db['puns']
# punDict = {"full": pun}
# collection.update(punDict, punDict, True)
#except:
# # do nothing on insertion error
# pass
|
<commit_before>#!/usr/bin/python
# imports
from pprint import pprint
import urllib2
import socket
import sys
#import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
# read the html content of the random pun page into a string
try:
html_content = urllib2.urlopen('http://www.punoftheday.com/cgi-bin/randompun.pl', timeout = 1).read()
except urllib2.URLError, e:
sys.stderr.write("(url error waiting for pun)\n")
sys.exit(1)
except socket.timeout, e:
sys.stderr.write("(socket timeout waiting for pun)\n")
sys.exit(1)
# create a beautiful soup object out of the raw html (the prettify is probably not necessary)
soup = BeautifulSoup(html_content, "html.parser")
soup.prettify()
# find and print the pun... it's the text in the element: div#main-content div.dropshadow1
pun = soup.find('div', {'id': 'main-content'}).find('div', {'class': 'dropshadow1'}).text
pun = pun.strip()
print pun
try:
client = MongoClient()
db = client['pundb']
collection = db['puns']
punDict = {"full": pun}
collection.update(punDict, punDict, True)
except:
# do nothing on insertion error
pass
<commit_msg>Disable mongo server for now<commit_after>#!/usr/bin/python
# imports
from pprint import pprint
import urllib2
import socket
import sys
#import re
from bs4 import BeautifulSoup
from pymongo import MongoClient
# read the html content of the random pun page into a string
try:
html_content = urllib2.urlopen('http://www.punoftheday.com/cgi-bin/randompun.pl', timeout = 1).read()
except urllib2.URLError, e:
sys.stderr.write("(url error waiting for pun)\n")
sys.exit(1)
except socket.timeout, e:
sys.stderr.write("(socket timeout waiting for pun)\n")
sys.exit(1)
# create a beautiful soup object out of the raw html (the prettify is probably not necessary)
soup = BeautifulSoup(html_content, "html.parser")
soup.prettify()
# find and print the pun... it's the text in the element: div#main-content div.dropshadow1
pun = soup.find('div', {'id': 'main-content'}).find('div', {'class': 'dropshadow1'}).text
pun = pun.strip()
print pun
#try:
# client = MongoClient()
# db = client['pundb']
# collection = db['puns']
# punDict = {"full": pun}
# collection.update(punDict, punDict, True)
#except:
# # do nothing on insertion error
# pass
|
0683e4fb0431563758d93b39d102d1c634a4535b
|
run.py
|
run.py
|
#!/usr/bin/env python
import importlib
import mongoengine
from eve import Eve
from eve_mongoengine import EveMongoengine
from qiprofile_rest import models
# The application.
app = Eve()
# The MongoEngine ORM extension.
ext = EveMongoengine(app)
# Register the model non-embedded documdent classes.
ext.add_model(models.Subject, url='subjects')
ext.add_model(models.SubjectDetail, url='subject-detail')
ext.add_model(models.SessionDetail, url='session-detail')
if __name__ == '__main__':
app.run()
|
#!/usr/bin/env python
import importlib
import mongoengine
from eve import Eve
from eve_mongoengine import EveMongoengine
from qiprofile_rest import models
# The application.
app = Eve()
# The MongoEngine ORM extension.
ext = EveMongoengine(app)
# Register the model non-embedded documdent classes.
ext.add_model(models.Subject, url='subject')
ext.add_model(models.SessionDetail, url='session-detail')
if __name__ == '__main__':
app.run()
|
Change the subject url from /quip/subjects to /quip/subject.
|
Change the subject url from /quip/subjects to /quip/subject.
|
Python
|
bsd-2-clause
|
ohsu-qin/qiprofile-rest,ohsu-qin/qirest
|
#!/usr/bin/env python
import importlib
import mongoengine
from eve import Eve
from eve_mongoengine import EveMongoengine
from qiprofile_rest import models
# The application.
app = Eve()
# The MongoEngine ORM extension.
ext = EveMongoengine(app)
# Register the model non-embedded documdent classes.
ext.add_model(models.Subject, url='subjects')
ext.add_model(models.SubjectDetail, url='subject-detail')
ext.add_model(models.SessionDetail, url='session-detail')
if __name__ == '__main__':
app.run()
Change the subject url from /quip/subjects to /quip/subject.
|
#!/usr/bin/env python
import importlib
import mongoengine
from eve import Eve
from eve_mongoengine import EveMongoengine
from qiprofile_rest import models
# The application.
app = Eve()
# The MongoEngine ORM extension.
ext = EveMongoengine(app)
# Register the model non-embedded documdent classes.
ext.add_model(models.Subject, url='subject')
ext.add_model(models.SessionDetail, url='session-detail')
if __name__ == '__main__':
app.run()
|
<commit_before>#!/usr/bin/env python
import importlib
import mongoengine
from eve import Eve
from eve_mongoengine import EveMongoengine
from qiprofile_rest import models
# The application.
app = Eve()
# The MongoEngine ORM extension.
ext = EveMongoengine(app)
# Register the model non-embedded documdent classes.
ext.add_model(models.Subject, url='subjects')
ext.add_model(models.SubjectDetail, url='subject-detail')
ext.add_model(models.SessionDetail, url='session-detail')
if __name__ == '__main__':
app.run()
<commit_msg>Change the subject url from /quip/subjects to /quip/subject.<commit_after>
|
#!/usr/bin/env python
import importlib
import mongoengine
from eve import Eve
from eve_mongoengine import EveMongoengine
from qiprofile_rest import models
# The application.
app = Eve()
# The MongoEngine ORM extension.
ext = EveMongoengine(app)
# Register the model non-embedded documdent classes.
ext.add_model(models.Subject, url='subject')
ext.add_model(models.SessionDetail, url='session-detail')
if __name__ == '__main__':
app.run()
|
#!/usr/bin/env python
import importlib
import mongoengine
from eve import Eve
from eve_mongoengine import EveMongoengine
from qiprofile_rest import models
# The application.
app = Eve()
# The MongoEngine ORM extension.
ext = EveMongoengine(app)
# Register the model non-embedded documdent classes.
ext.add_model(models.Subject, url='subjects')
ext.add_model(models.SubjectDetail, url='subject-detail')
ext.add_model(models.SessionDetail, url='session-detail')
if __name__ == '__main__':
app.run()
Change the subject url from /quip/subjects to /quip/subject.#!/usr/bin/env python
import importlib
import mongoengine
from eve import Eve
from eve_mongoengine import EveMongoengine
from qiprofile_rest import models
# The application.
app = Eve()
# The MongoEngine ORM extension.
ext = EveMongoengine(app)
# Register the model non-embedded documdent classes.
ext.add_model(models.Subject, url='subject')
ext.add_model(models.SessionDetail, url='session-detail')
if __name__ == '__main__':
app.run()
|
<commit_before>#!/usr/bin/env python
import importlib
import mongoengine
from eve import Eve
from eve_mongoengine import EveMongoengine
from qiprofile_rest import models
# The application.
app = Eve()
# The MongoEngine ORM extension.
ext = EveMongoengine(app)
# Register the model non-embedded documdent classes.
ext.add_model(models.Subject, url='subjects')
ext.add_model(models.SubjectDetail, url='subject-detail')
ext.add_model(models.SessionDetail, url='session-detail')
if __name__ == '__main__':
app.run()
<commit_msg>Change the subject url from /quip/subjects to /quip/subject.<commit_after>#!/usr/bin/env python
import importlib
import mongoengine
from eve import Eve
from eve_mongoengine import EveMongoengine
from qiprofile_rest import models
# The application.
app = Eve()
# The MongoEngine ORM extension.
ext = EveMongoengine(app)
# Register the model non-embedded documdent classes.
ext.add_model(models.Subject, url='subject')
ext.add_model(models.SessionDetail, url='session-detail')
if __name__ == '__main__':
app.run()
|
11fde526c9d25c0fb9ef678d4264a52e4845a518
|
pidman/pid/migrations/0002_pid_sequence_initial_value.py
|
pidman/pid/migrations/0002_pid_sequence_initial_value.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME,
last=last_val)
print "got HERE 2"
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
Update pid sequence so it will work even if sequence already exists
|
Update pid sequence so it will work even if sequence already exists
|
Python
|
apache-2.0
|
emory-libraries/pidman,emory-libraries/pidman
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME,
last=last_val)
print "got HERE 2"
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
Update pid sequence so it will work even if sequence already exists
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME,
last=last_val)
print "got HERE 2"
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
<commit_msg>Update pid sequence so it will work even if sequence already exists<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME,
last=last_val)
print "got HERE 2"
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
Update pid sequence so it will work even if sequence already exists# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME,
last=last_val)
print "got HERE 2"
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
<commit_msg>Update pid sequence so it will work even if sequence already exists<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from pidman.pid.noid import decode_noid
from pidman.pid import models as pid_models
def pid_sequence_lastvalue(apps, schema_editor):
# if the database has existing pids, update the sequence last value
# so it will start minting pids starting after the current set
Pid = apps.get_model("pid", "Pid")
Sequence = apps.get_model("sequences", "Sequence")
if Pid.objects.count():
print Pid.objects.count()
max_noid = Pid.objects.all() \
.aggregate(models.Max('pid')).values()[0]
last_val = decode_noid(max_noid)
pid_seq, created = Sequence.objects.get_or_create(name=pid_models.Pid.SEQUENCE_NAME)
pid_seq.last = last_val
pid_seq.save()
def remove_pid_sequence(apps, schema_editor):
Sequence = apps.get_model("sequences", "Sequence")
Sequence.objects.get(name=pid_models.Pid.SEQUENCE_NAME).delete()
class Migration(migrations.Migration):
dependencies = [
('pid', '0001_initial'),
('sequences', '0001_initial'),
]
operations = [
migrations.RunPython(pid_sequence_lastvalue,
remove_pid_sequence),
]
|
abe6f3430efb7291054d96a51cc4a290e0bd7a59
|
osf/migrations/0224_population_registration_subscription_notifications.py
|
osf/migrations/0224_population_registration_subscription_notifications.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-26 18:43
from __future__ import unicode_literals
from django.db import migrations, models
from osf.management.commands.add_notification_subscription import add_reviews_notification_setting
from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions
def revert(apps, schema_editor):
NotificationSubscription = apps.get_model('osf', 'NotificationSubscription')
# The revert of this migration deletes all NotificationSubscription instances
NotificationSubscription.objects.filter(provider__isnull=False, provider__type='osf.registrationprovider').delete()
def populate_subscriptions(*args, **kwargs):
add_reviews_notification_setting('global_reviews')
populate_registration_provider_notification_subscriptions()
class Migration(migrations.Migration):
dependencies = [
('osf', '0223_auto_20201026_1843'),
]
operations = [
migrations.AlterField(
model_name='notificationsubscription',
name='event_name',
field=models.CharField(max_length=100),
),
migrations.RunPython(populate_subscriptions, revert)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-26 18:43
from __future__ import unicode_literals
from django.db import migrations, models
from osf.management.commands.add_notification_subscription import add_reviews_notification_setting
from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions
def revert(apps, schema_editor):
NotificationSubscription = apps.get_model('osf', 'NotificationSubscription')
# The revert of this migration deletes all NotificationSubscription instances
NotificationSubscription.objects.filter(provider__isnull=False, provider__type='osf.registrationprovider').delete()
def populate_subscriptions(*args, **kwargs):
add_reviews_notification_setting('global_reviews')
populate_registration_provider_notification_subscriptions()
class Migration(migrations.Migration):
dependencies = [
('osf', '0223_auto_20201026_1843'),
]
operations = [
migrations.AlterField(
model_name='notificationsubscription',
name='event_name',
field=models.CharField(max_length=100),
)
]
|
Remove call to run management commands that we didn't use during migration
|
Remove call to run management commands that we didn't use during migration
|
Python
|
apache-2.0
|
brianjgeiger/osf.io,mfraezz/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,mfraezz/osf.io,adlius/osf.io,felliott/osf.io,adlius/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,felliott/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,baylee-d/osf.io,cslzchen/osf.io,aaxelb/osf.io,aaxelb/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,felliott/osf.io
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-26 18:43
from __future__ import unicode_literals
from django.db import migrations, models
from osf.management.commands.add_notification_subscription import add_reviews_notification_setting
from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions
def revert(apps, schema_editor):
NotificationSubscription = apps.get_model('osf', 'NotificationSubscription')
# The revert of this migration deletes all NotificationSubscription instances
NotificationSubscription.objects.filter(provider__isnull=False, provider__type='osf.registrationprovider').delete()
def populate_subscriptions(*args, **kwargs):
add_reviews_notification_setting('global_reviews')
populate_registration_provider_notification_subscriptions()
class Migration(migrations.Migration):
dependencies = [
('osf', '0223_auto_20201026_1843'),
]
operations = [
migrations.AlterField(
model_name='notificationsubscription',
name='event_name',
field=models.CharField(max_length=100),
),
migrations.RunPython(populate_subscriptions, revert)
]
Remove call to run management commands that we didn't use during migration
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-26 18:43
from __future__ import unicode_literals
from django.db import migrations, models
from osf.management.commands.add_notification_subscription import add_reviews_notification_setting
from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions
def revert(apps, schema_editor):
NotificationSubscription = apps.get_model('osf', 'NotificationSubscription')
# The revert of this migration deletes all NotificationSubscription instances
NotificationSubscription.objects.filter(provider__isnull=False, provider__type='osf.registrationprovider').delete()
def populate_subscriptions(*args, **kwargs):
add_reviews_notification_setting('global_reviews')
populate_registration_provider_notification_subscriptions()
class Migration(migrations.Migration):
dependencies = [
('osf', '0223_auto_20201026_1843'),
]
operations = [
migrations.AlterField(
model_name='notificationsubscription',
name='event_name',
field=models.CharField(max_length=100),
)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-26 18:43
from __future__ import unicode_literals
from django.db import migrations, models
from osf.management.commands.add_notification_subscription import add_reviews_notification_setting
from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions
def revert(apps, schema_editor):
NotificationSubscription = apps.get_model('osf', 'NotificationSubscription')
# The revert of this migration deletes all NotificationSubscription instances
NotificationSubscription.objects.filter(provider__isnull=False, provider__type='osf.registrationprovider').delete()
def populate_subscriptions(*args, **kwargs):
add_reviews_notification_setting('global_reviews')
populate_registration_provider_notification_subscriptions()
class Migration(migrations.Migration):
dependencies = [
('osf', '0223_auto_20201026_1843'),
]
operations = [
migrations.AlterField(
model_name='notificationsubscription',
name='event_name',
field=models.CharField(max_length=100),
),
migrations.RunPython(populate_subscriptions, revert)
]
<commit_msg>Remove call to run management commands that we didn't use during migration<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-26 18:43
from __future__ import unicode_literals
from django.db import migrations, models
from osf.management.commands.add_notification_subscription import add_reviews_notification_setting
from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions
def revert(apps, schema_editor):
NotificationSubscription = apps.get_model('osf', 'NotificationSubscription')
# The revert of this migration deletes all NotificationSubscription instances
NotificationSubscription.objects.filter(provider__isnull=False, provider__type='osf.registrationprovider').delete()
def populate_subscriptions(*args, **kwargs):
add_reviews_notification_setting('global_reviews')
populate_registration_provider_notification_subscriptions()
class Migration(migrations.Migration):
dependencies = [
('osf', '0223_auto_20201026_1843'),
]
operations = [
migrations.AlterField(
model_name='notificationsubscription',
name='event_name',
field=models.CharField(max_length=100),
)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-26 18:43
from __future__ import unicode_literals
from django.db import migrations, models
from osf.management.commands.add_notification_subscription import add_reviews_notification_setting
from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions
def revert(apps, schema_editor):
NotificationSubscription = apps.get_model('osf', 'NotificationSubscription')
# The revert of this migration deletes all NotificationSubscription instances
NotificationSubscription.objects.filter(provider__isnull=False, provider__type='osf.registrationprovider').delete()
def populate_subscriptions(*args, **kwargs):
add_reviews_notification_setting('global_reviews')
populate_registration_provider_notification_subscriptions()
class Migration(migrations.Migration):
dependencies = [
('osf', '0223_auto_20201026_1843'),
]
operations = [
migrations.AlterField(
model_name='notificationsubscription',
name='event_name',
field=models.CharField(max_length=100),
),
migrations.RunPython(populate_subscriptions, revert)
]
Remove call to run management commands that we didn't use during migration# -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-26 18:43
from __future__ import unicode_literals
from django.db import migrations, models
from osf.management.commands.add_notification_subscription import add_reviews_notification_setting
from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions
def revert(apps, schema_editor):
NotificationSubscription = apps.get_model('osf', 'NotificationSubscription')
# The revert of this migration deletes all NotificationSubscription instances
NotificationSubscription.objects.filter(provider__isnull=False, provider__type='osf.registrationprovider').delete()
def populate_subscriptions(*args, **kwargs):
add_reviews_notification_setting('global_reviews')
populate_registration_provider_notification_subscriptions()
class Migration(migrations.Migration):
dependencies = [
('osf', '0223_auto_20201026_1843'),
]
operations = [
migrations.AlterField(
model_name='notificationsubscription',
name='event_name',
field=models.CharField(max_length=100),
)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-26 18:43
from __future__ import unicode_literals
from django.db import migrations, models
from osf.management.commands.add_notification_subscription import add_reviews_notification_setting
from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions
def revert(apps, schema_editor):
NotificationSubscription = apps.get_model('osf', 'NotificationSubscription')
# The revert of this migration deletes all NotificationSubscription instances
NotificationSubscription.objects.filter(provider__isnull=False, provider__type='osf.registrationprovider').delete()
def populate_subscriptions(*args, **kwargs):
add_reviews_notification_setting('global_reviews')
populate_registration_provider_notification_subscriptions()
class Migration(migrations.Migration):
dependencies = [
('osf', '0223_auto_20201026_1843'),
]
operations = [
migrations.AlterField(
model_name='notificationsubscription',
name='event_name',
field=models.CharField(max_length=100),
),
migrations.RunPython(populate_subscriptions, revert)
]
<commit_msg>Remove call to run management commands that we didn't use during migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.28 on 2020-10-26 18:43
from __future__ import unicode_literals
from django.db import migrations, models
from osf.management.commands.add_notification_subscription import add_reviews_notification_setting
from osf.management.commands.populate_registration_provider_notification_subscriptions import populate_registration_provider_notification_subscriptions
def revert(apps, schema_editor):
NotificationSubscription = apps.get_model('osf', 'NotificationSubscription')
# The revert of this migration deletes all NotificationSubscription instances
NotificationSubscription.objects.filter(provider__isnull=False, provider__type='osf.registrationprovider').delete()
def populate_subscriptions(*args, **kwargs):
add_reviews_notification_setting('global_reviews')
populate_registration_provider_notification_subscriptions()
class Migration(migrations.Migration):
dependencies = [
('osf', '0223_auto_20201026_1843'),
]
operations = [
migrations.AlterField(
model_name='notificationsubscription',
name='event_name',
field=models.CharField(max_length=100),
)
]
|
78f049ce9713dabd3eec544494dadcab7ff93d4c
|
sui_hei/templatetags/markdown.py
|
sui_hei/templatetags/markdown.py
|
import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra'])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
|
import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
from markdown.extensions.headerid import HeaderIdExtension
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra', HeaderIdExtension(level=4)])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
|
Add header id extension for github preferences
|
Add header id extension for github preferences
|
Python
|
mit
|
heyrict/cindy,heyrict/cindy,heyrict/cindy
|
import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra'])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
Add header id extension for github preferences
|
import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
from markdown.extensions.headerid import HeaderIdExtension
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra', HeaderIdExtension(level=4)])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
|
<commit_before>import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra'])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
<commit_msg>Add header id extension for github preferences<commit_after>
|
import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
from markdown.extensions.headerid import HeaderIdExtension
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra', HeaderIdExtension(level=4)])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
|
import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra'])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
Add header id extension for github preferencesimport re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
from markdown.extensions.headerid import HeaderIdExtension
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra', HeaderIdExtension(level=4)])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
|
<commit_before>import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra'])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
<commit_msg>Add header id extension for github preferences<commit_after>import re
from bs4 import BeautifulSoup
from django import template
from django.template.defaultfilters import stringfilter
from markdown import markdown as md
from markdown.extensions.headerid import HeaderIdExtension
register = template.Library()
@stringfilter
@register.filter(is_safe=True)
def text2md(value):
'''
convert markdown-like text to html.
strip header <p> and footer </p> if p is True.
'''
returns = md(value, ['markdown.extensions.extra', HeaderIdExtension(level=4)])
returns = BeautifulSoup(returns, 'html5lib').prettify(encoding="utf8")
return returns.decode('utf8')
@register.filter(is_safe=True)
@stringfilter
def line2md(value, p=True):
returns = BeautifulSoup(value, 'html5lib').get_text()
returns = re.sub("^([*+-]) ", r"\\\1 ", returns)
returns = md(returns, ['markdown.extensions.extra'])
if p: returns = returns[3:-4]
return returns
|
c258b1995bdb870b3818a3dca402b86f2bb85fe9
|
chmvh_website/gallery/management/commands/generatethumbnails.py
|
chmvh_website/gallery/management/commands/generatethumbnails.py
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for the gallery images'
def handle(self, *args, **kwargs):
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete()
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
Add option to overwrite existing thumbnails.
|
Add option to overwrite existing thumbnails.
|
Python
|
mit
|
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for the gallery images'
def handle(self, *args, **kwargs):
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
Add option to overwrite existing thumbnails.
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete()
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
<commit_before>from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for the gallery images'
def handle(self, *args, **kwargs):
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
<commit_msg>Add option to overwrite existing thumbnails.<commit_after>
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete()
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for the gallery images'
def handle(self, *args, **kwargs):
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
Add option to overwrite existing thumbnails.from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete()
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
<commit_before>from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for the gallery images'
def handle(self, *args, **kwargs):
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
<commit_msg>Add option to overwrite existing thumbnails.<commit_after>from django.core.management.base import BaseCommand
from gallery import models
from gallery.tasks import create_thumbnail
class Command(BaseCommand):
help = 'Generates thumbnails for gallery images without thumbnails'
def add_arguments(self, parser):
parser.add_argument(
'--overwrite',
action='store_true',
default=False,
dest='overwrite',
help='Generate thumbnails for all pictures.')
def handle(self, *args, **kwargs):
if kwargs['overwrite']:
self.stdout.write(self.style.WARNING(
'Overwriting previously generated thumbnails.'))
patients = models.Patient.objects.all()
else:
patients = models.Patient.objects.filter(thumbnail=None)
count = patients.count()
if count == 0:
self.stdout.write("No thumbnails to generate.")
return
elif count == 1:
count_bit = '1 thumbnail'
else:
count_bit = '{0} thumbnails'.format(count)
self.stdout.write('Generating {}...'.format(count_bit))
for patient in patients:
if kwargs['overwrite'] and patient.thumbnail:
patient.thumbnail.delete()
create_thumbnail(patient)
self.stdout.write(self.style.SUCCESS(
"Successfully generated {}.".format(count_bit)))
|
4c88da91221899c22cfe9030f40cbb4e0b3e904d
|
{{project.repo_name}}/tests/test_{{project.repo_name}}.py
|
{{project.repo_name}}/tests/test_{{project.repo_name}}.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_{{ project.repo_name }}
------------
Tests for `{{ project.repo_name }}` module.
"""
import os
import shutil
import unittest
from {{ project.repo_name }} import {{ project.repo_name }}
class TestComplexity(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_{{ project.repo_name }}
------------
Tests for `{{ project.repo_name }}` module.
"""
import os
import shutil
import unittest
from {{ project.repo_name }} import {{ project.repo_name }}
class Test{{ project.repo_name|capitalize }}(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
Remove hardcoded name of TestCase.
|
Remove hardcoded name of TestCase.
|
Python
|
bsd-2-clause
|
rockymeza/cookiecutter-djangoapp,aeroaks/cookiecutter-pyqt4,rockymeza/cookiecutter-djangoapp
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_{{ project.repo_name }}
------------
Tests for `{{ project.repo_name }}` module.
"""
import os
import shutil
import unittest
from {{ project.repo_name }} import {{ project.repo_name }}
class TestComplexity(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
Remove hardcoded name of TestCase.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_{{ project.repo_name }}
------------
Tests for `{{ project.repo_name }}` module.
"""
import os
import shutil
import unittest
from {{ project.repo_name }} import {{ project.repo_name }}
class Test{{ project.repo_name|capitalize }}(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_{{ project.repo_name }}
------------
Tests for `{{ project.repo_name }}` module.
"""
import os
import shutil
import unittest
from {{ project.repo_name }} import {{ project.repo_name }}
class TestComplexity(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
<commit_msg>Remove hardcoded name of TestCase.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_{{ project.repo_name }}
------------
Tests for `{{ project.repo_name }}` module.
"""
import os
import shutil
import unittest
from {{ project.repo_name }} import {{ project.repo_name }}
class Test{{ project.repo_name|capitalize }}(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_{{ project.repo_name }}
------------
Tests for `{{ project.repo_name }}` module.
"""
import os
import shutil
import unittest
from {{ project.repo_name }} import {{ project.repo_name }}
class TestComplexity(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
Remove hardcoded name of TestCase.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_{{ project.repo_name }}
------------
Tests for `{{ project.repo_name }}` module.
"""
import os
import shutil
import unittest
from {{ project.repo_name }} import {{ project.repo_name }}
class Test{{ project.repo_name|capitalize }}(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_{{ project.repo_name }}
------------
Tests for `{{ project.repo_name }}` module.
"""
import os
import shutil
import unittest
from {{ project.repo_name }} import {{ project.repo_name }}
class TestComplexity(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
<commit_msg>Remove hardcoded name of TestCase.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_{{ project.repo_name }}
------------
Tests for `{{ project.repo_name }}` module.
"""
import os
import shutil
import unittest
from {{ project.repo_name }} import {{ project.repo_name }}
class Test{{ project.repo_name|capitalize }}(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
pass
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
404f300b9e8ce33149324888a42ce22fb5c00dc0
|
api/bioguide/management/commands/import_bioguide.py
|
api/bioguide/management/commands/import_bioguide.py
|
import csv
import sys
from django.core.management.base import BaseCommand, CommandError
from bioguide.models import Legislator
import name_tools
class Command(BaseCommand):
def handle(self, *args, **options):
fields = ['bioguide_id', 'name', 'birth_death', 'position', 'party', 'state', 'congress', ]
for row in csv.reader(sys.stdin):
row = dict(zip(fields, row))
name = name_tools.split(row['name'])
row['prefix'], row['first'], row['last'], row['suffix'] = name
del(row['name'])
print row
legislator = Legislator.objects.create(**row)
print legislator
|
import csv
import sys
import urllib2
from django.core.management.base import BaseCommand, CommandError
from bioguide.models import Legislator
import name_tools
from lxml.html import document_fromstring
class Command(BaseCommand):
def handle(self, *args, **options):
fields = ['bioguide_id', 'name', 'birth_death', 'position', 'party', 'state', 'congress', ]
for congress in range(1, 112):
data = 'lastname=&firstname=&position=&state=&party=&congress=%s' % str(congress)
url = 'http://bioguide.congress.gov/biosearch/biosearch1.asp'
req = urllib2.Request(url, data)
response = urllib2.urlopen(req).read()
doc = document_fromstring(response)
for row in doc.cssselect('tr'):
try:
cells = row.cssselect('td')
if len(cells) != 6:
continue
namecell = cells[0]
birth_death, position, party, state, congress = [x.text.encode('utf-8') if x.text else '' for x in cells[1:]]
a = namecell.cssselect('a')
name = None
if a:
a = a[0]
name = a.text
bioguide_id = a.values()[0].split('=')[-1]
else:
continue
data = {'bioguide_id': bioguide_id,
'birth_death': birth_death,
'position': position,
'party': party,
'state': state,
'congress': congress, }
data['prefix'], data['first'], data['last'], data['suffix'] = name_tools.split(name)
legislator = Legislator.objects.create(**data)
print data
except Exception, e:
print Exception, e
|
Update bioguide importer to scrape data from bioguide.congress.gov
|
Update bioguide importer to scrape data from bioguide.congress.gov
|
Python
|
bsd-3-clause
|
sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words
|
import csv
import sys
from django.core.management.base import BaseCommand, CommandError
from bioguide.models import Legislator
import name_tools
class Command(BaseCommand):
def handle(self, *args, **options):
fields = ['bioguide_id', 'name', 'birth_death', 'position', 'party', 'state', 'congress', ]
for row in csv.reader(sys.stdin):
row = dict(zip(fields, row))
name = name_tools.split(row['name'])
row['prefix'], row['first'], row['last'], row['suffix'] = name
del(row['name'])
print row
legislator = Legislator.objects.create(**row)
print legislator
Update bioguide importer to scrape data from bioguide.congress.gov
|
import csv
import sys
import urllib2
from django.core.management.base import BaseCommand, CommandError
from bioguide.models import Legislator
import name_tools
from lxml.html import document_fromstring
class Command(BaseCommand):
def handle(self, *args, **options):
fields = ['bioguide_id', 'name', 'birth_death', 'position', 'party', 'state', 'congress', ]
for congress in range(1, 112):
data = 'lastname=&firstname=&position=&state=&party=&congress=%s' % str(congress)
url = 'http://bioguide.congress.gov/biosearch/biosearch1.asp'
req = urllib2.Request(url, data)
response = urllib2.urlopen(req).read()
doc = document_fromstring(response)
for row in doc.cssselect('tr'):
try:
cells = row.cssselect('td')
if len(cells) != 6:
continue
namecell = cells[0]
birth_death, position, party, state, congress = [x.text.encode('utf-8') if x.text else '' for x in cells[1:]]
a = namecell.cssselect('a')
name = None
if a:
a = a[0]
name = a.text
bioguide_id = a.values()[0].split('=')[-1]
else:
continue
data = {'bioguide_id': bioguide_id,
'birth_death': birth_death,
'position': position,
'party': party,
'state': state,
'congress': congress, }
data['prefix'], data['first'], data['last'], data['suffix'] = name_tools.split(name)
legislator = Legislator.objects.create(**data)
print data
except Exception, e:
print Exception, e
|
<commit_before>import csv
import sys
from django.core.management.base import BaseCommand, CommandError
from bioguide.models import Legislator
import name_tools
class Command(BaseCommand):
def handle(self, *args, **options):
fields = ['bioguide_id', 'name', 'birth_death', 'position', 'party', 'state', 'congress', ]
for row in csv.reader(sys.stdin):
row = dict(zip(fields, row))
name = name_tools.split(row['name'])
row['prefix'], row['first'], row['last'], row['suffix'] = name
del(row['name'])
print row
legislator = Legislator.objects.create(**row)
print legislator
<commit_msg>Update bioguide importer to scrape data from bioguide.congress.gov<commit_after>
|
import csv
import sys
import urllib2
from django.core.management.base import BaseCommand, CommandError
from bioguide.models import Legislator
import name_tools
from lxml.html import document_fromstring
class Command(BaseCommand):
def handle(self, *args, **options):
fields = ['bioguide_id', 'name', 'birth_death', 'position', 'party', 'state', 'congress', ]
for congress in range(1, 112):
data = 'lastname=&firstname=&position=&state=&party=&congress=%s' % str(congress)
url = 'http://bioguide.congress.gov/biosearch/biosearch1.asp'
req = urllib2.Request(url, data)
response = urllib2.urlopen(req).read()
doc = document_fromstring(response)
for row in doc.cssselect('tr'):
try:
cells = row.cssselect('td')
if len(cells) != 6:
continue
namecell = cells[0]
birth_death, position, party, state, congress = [x.text.encode('utf-8') if x.text else '' for x in cells[1:]]
a = namecell.cssselect('a')
name = None
if a:
a = a[0]
name = a.text
bioguide_id = a.values()[0].split('=')[-1]
else:
continue
data = {'bioguide_id': bioguide_id,
'birth_death': birth_death,
'position': position,
'party': party,
'state': state,
'congress': congress, }
data['prefix'], data['first'], data['last'], data['suffix'] = name_tools.split(name)
legislator = Legislator.objects.create(**data)
print data
except Exception, e:
print Exception, e
|
import csv
import sys
from django.core.management.base import BaseCommand, CommandError
from bioguide.models import Legislator
import name_tools
class Command(BaseCommand):
def handle(self, *args, **options):
fields = ['bioguide_id', 'name', 'birth_death', 'position', 'party', 'state', 'congress', ]
for row in csv.reader(sys.stdin):
row = dict(zip(fields, row))
name = name_tools.split(row['name'])
row['prefix'], row['first'], row['last'], row['suffix'] = name
del(row['name'])
print row
legislator = Legislator.objects.create(**row)
print legislator
Update bioguide importer to scrape data from bioguide.congress.govimport csv
import sys
import urllib2
from django.core.management.base import BaseCommand, CommandError
from bioguide.models import Legislator
import name_tools
from lxml.html import document_fromstring
class Command(BaseCommand):
def handle(self, *args, **options):
fields = ['bioguide_id', 'name', 'birth_death', 'position', 'party', 'state', 'congress', ]
for congress in range(1, 112):
data = 'lastname=&firstname=&position=&state=&party=&congress=%s' % str(congress)
url = 'http://bioguide.congress.gov/biosearch/biosearch1.asp'
req = urllib2.Request(url, data)
response = urllib2.urlopen(req).read()
doc = document_fromstring(response)
for row in doc.cssselect('tr'):
try:
cells = row.cssselect('td')
if len(cells) != 6:
continue
namecell = cells[0]
birth_death, position, party, state, congress = [x.text.encode('utf-8') if x.text else '' for x in cells[1:]]
a = namecell.cssselect('a')
name = None
if a:
a = a[0]
name = a.text
bioguide_id = a.values()[0].split('=')[-1]
else:
continue
data = {'bioguide_id': bioguide_id,
'birth_death': birth_death,
'position': position,
'party': party,
'state': state,
'congress': congress, }
data['prefix'], data['first'], data['last'], data['suffix'] = name_tools.split(name)
legislator = Legislator.objects.create(**data)
print data
except Exception, e:
print Exception, e
|
<commit_before>import csv
import sys
from django.core.management.base import BaseCommand, CommandError
from bioguide.models import Legislator
import name_tools
class Command(BaseCommand):
def handle(self, *args, **options):
fields = ['bioguide_id', 'name', 'birth_death', 'position', 'party', 'state', 'congress', ]
for row in csv.reader(sys.stdin):
row = dict(zip(fields, row))
name = name_tools.split(row['name'])
row['prefix'], row['first'], row['last'], row['suffix'] = name
del(row['name'])
print row
legislator = Legislator.objects.create(**row)
print legislator
<commit_msg>Update bioguide importer to scrape data from bioguide.congress.gov<commit_after>import csv
import sys
import urllib2
from django.core.management.base import BaseCommand, CommandError
from bioguide.models import Legislator
import name_tools
from lxml.html import document_fromstring
class Command(BaseCommand):
def handle(self, *args, **options):
fields = ['bioguide_id', 'name', 'birth_death', 'position', 'party', 'state', 'congress', ]
for congress in range(1, 112):
data = 'lastname=&firstname=&position=&state=&party=&congress=%s' % str(congress)
url = 'http://bioguide.congress.gov/biosearch/biosearch1.asp'
req = urllib2.Request(url, data)
response = urllib2.urlopen(req).read()
doc = document_fromstring(response)
for row in doc.cssselect('tr'):
try:
cells = row.cssselect('td')
if len(cells) != 6:
continue
namecell = cells[0]
birth_death, position, party, state, congress = [x.text.encode('utf-8') if x.text else '' for x in cells[1:]]
a = namecell.cssselect('a')
name = None
if a:
a = a[0]
name = a.text
bioguide_id = a.values()[0].split('=')[-1]
else:
continue
data = {'bioguide_id': bioguide_id,
'birth_death': birth_death,
'position': position,
'party': party,
'state': state,
'congress': congress, }
data['prefix'], data['first'], data['last'], data['suffix'] = name_tools.split(name)
legislator = Legislator.objects.create(**data)
print data
except Exception, e:
print Exception, e
|
c23c70dd10797a162efb137a53eec53c6ce554c7
|
deflect/management/commands/checkurls.py
|
deflect/management/commands/checkurls.py
|
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
print self.bad_redirect_text(url, e)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
from django.contrib.sites.models import Site
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
print self.bad_redirect_text(url, e)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
base = 'http://%s' % Site.objects.get_current().domain
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
Print full URL for admin edit link
|
Print full URL for admin edit link
|
Python
|
bsd-3-clause
|
jbittel/django-deflect
|
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
print self.bad_redirect_text(url, e)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=reverse('admin:deflect_shorturl_change', args=(url.id,)))
Print full URL for admin edit link
|
from django.contrib.sites.models import Site
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
print self.bad_redirect_text(url, e)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
base = 'http://%s' % Site.objects.get_current().domain
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
<commit_before>from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
print self.bad_redirect_text(url, e)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=reverse('admin:deflect_shorturl_change', args=(url.id,)))
<commit_msg>Print full URL for admin edit link<commit_after>
|
from django.contrib.sites.models import Site
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
print self.bad_redirect_text(url, e)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
base = 'http://%s' % Site.objects.get_current().domain
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
print self.bad_redirect_text(url, e)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=reverse('admin:deflect_shorturl_change', args=(url.id,)))
Print full URL for admin edit linkfrom django.contrib.sites.models import Site
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
print self.bad_redirect_text(url, e)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
base = 'http://%s' % Site.objects.get_current().domain
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
<commit_before>from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
print self.bad_redirect_text(url, e)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=reverse('admin:deflect_shorturl_change', args=(url.id,)))
<commit_msg>Print full URL for admin edit link<commit_after>from django.contrib.sites.models import Site
from django.core.management.base import NoArgsCommand
from django.core.urlresolvers import reverse
import requests
from deflect.models import ShortURL
class Command(NoArgsCommand):
help = "Validate short URL redirect targets"
def handle_noargs(self, *args, **options):
for url in ShortURL.objects.all():
try:
url.check_status()
except requests.exceptions.RequestException as e:
print self.bad_redirect_text(url, e)
def bad_redirect_text(self, url, exception):
"""
Return informational text for a URL that raised an
exception.
"""
base = 'http://%s' % Site.objects.get_current().domain
return """
Redirect {key} with target {target} returns {error}
Edit this short URL: {edit}
""".format(key=url.key, target=url.long_url, error=exception,
edit=base + reverse('admin:deflect_shorturl_change', args=(url.id,)))
|
d58fa915665c3a2c99588bb19bfaf14e6728371f
|
channels/__init__.py
|
channels/__init__.py
|
import django
__version__ = "2.4.0"
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
DEFAULT_CHANNEL_LAYER = "default"
|
__version__ = "2.4.0"
try:
import django
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
except ModuleNotFoundError:
pass
DEFAULT_CHANNEL_LAYER = "default"
|
Fix RTD build for missing Django dependency.
|
Fix RTD build for missing Django dependency.
|
Python
|
bsd-3-clause
|
andrewgodwin/channels,django/channels,andrewgodwin/django-channels
|
import django
__version__ = "2.4.0"
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
DEFAULT_CHANNEL_LAYER = "default"
Fix RTD build for missing Django dependency.
|
__version__ = "2.4.0"
try:
import django
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
except ModuleNotFoundError:
pass
DEFAULT_CHANNEL_LAYER = "default"
|
<commit_before>import django
__version__ = "2.4.0"
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
DEFAULT_CHANNEL_LAYER = "default"
<commit_msg>Fix RTD build for missing Django dependency.<commit_after>
|
__version__ = "2.4.0"
try:
import django
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
except ModuleNotFoundError:
pass
DEFAULT_CHANNEL_LAYER = "default"
|
import django
__version__ = "2.4.0"
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
DEFAULT_CHANNEL_LAYER = "default"
Fix RTD build for missing Django dependency.__version__ = "2.4.0"
try:
import django
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
except ModuleNotFoundError:
pass
DEFAULT_CHANNEL_LAYER = "default"
|
<commit_before>import django
__version__ = "2.4.0"
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
DEFAULT_CHANNEL_LAYER = "default"
<commit_msg>Fix RTD build for missing Django dependency.<commit_after>__version__ = "2.4.0"
try:
import django
if django.VERSION < (3, 2):
default_app_config = "channels.apps.ChannelsConfig"
except ModuleNotFoundError:
pass
DEFAULT_CHANNEL_LAYER = "default"
|
d10b6cca0b0925b4ab6670b375f30df4938767a7
|
app.py
|
app.py
|
import flask
import config
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('dashboard.html')
@app.route('/new')
def new():
return flask.render_template('new_procedure.html')
@app.route('/sms', methods=['POST'])
def receive_sms():
print(str.format("Received SMS: \n"
"To: {0}\n"
"From: {1}\n"
"Body: {2}\n",
str(flask.request.form['To']),
str(flask.request.form['From']),
str(flask.request.form['Body'])))
return '<Response></Response>'
if __name__ == '__main__':
app.debug = config.debug_mode
print(str.format("Debug Mode is: {0}", app.debug))
app.run()
|
import flask
import config
app = flask.Flask(__name__)
@app.route('/')
@app.route('/dashboard')
def render_dashboard():
return flask.render_template('dashboard.html')
@app.route('/new')
def render_new_procedure_form():
return flask.render_template('new_procedure.html')
# Endpoint for new opportunity form submission
@app.route('/opportunity', methods=['POST'])
def new_opportunity():
print(str.format(""))
return flask.redirect(flask.url_for('new', code=201))
# Endpoint for receiving SMS messages from Twilio
@app.route('/sms', methods=['POST'])
def receive_sms():
print(str.format("Received SMS: \n"
"To: {0}\n"
"From: {1}\n"
"Body: {2}\n",
str(flask.request.form['To']),
str(flask.request.form['From']),
str(flask.request.form['Body'])))
return '<Response></Response>'
if __name__ == '__main__':
app.debug = config.debug_mode
print(str.format("Debug Mode is: {0}", app.debug))
app.run()
|
Add simple endpoint for form submission
|
Add simple endpoint for form submission
|
Python
|
mit
|
nhshd-slot/SLOT,bsharif/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT,bsharif/SLOT,bsharif/SLOT
|
import flask
import config
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('dashboard.html')
@app.route('/new')
def new():
return flask.render_template('new_procedure.html')
@app.route('/sms', methods=['POST'])
def receive_sms():
print(str.format("Received SMS: \n"
"To: {0}\n"
"From: {1}\n"
"Body: {2}\n",
str(flask.request.form['To']),
str(flask.request.form['From']),
str(flask.request.form['Body'])))
return '<Response></Response>'
if __name__ == '__main__':
app.debug = config.debug_mode
print(str.format("Debug Mode is: {0}", app.debug))
app.run()
Add simple endpoint for form submission
|
import flask
import config
app = flask.Flask(__name__)
@app.route('/')
@app.route('/dashboard')
def render_dashboard():
return flask.render_template('dashboard.html')
@app.route('/new')
def render_new_procedure_form():
return flask.render_template('new_procedure.html')
# Endpoint for new opportunity form submission
@app.route('/opportunity', methods=['POST'])
def new_opportunity():
print(str.format(""))
return flask.redirect(flask.url_for('new', code=201))
# Endpoint for receiving SMS messages from Twilio
@app.route('/sms', methods=['POST'])
def receive_sms():
print(str.format("Received SMS: \n"
"To: {0}\n"
"From: {1}\n"
"Body: {2}\n",
str(flask.request.form['To']),
str(flask.request.form['From']),
str(flask.request.form['Body'])))
return '<Response></Response>'
if __name__ == '__main__':
app.debug = config.debug_mode
print(str.format("Debug Mode is: {0}", app.debug))
app.run()
|
<commit_before>import flask
import config
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('dashboard.html')
@app.route('/new')
def new():
return flask.render_template('new_procedure.html')
@app.route('/sms', methods=['POST'])
def receive_sms():
print(str.format("Received SMS: \n"
"To: {0}\n"
"From: {1}\n"
"Body: {2}\n",
str(flask.request.form['To']),
str(flask.request.form['From']),
str(flask.request.form['Body'])))
return '<Response></Response>'
if __name__ == '__main__':
app.debug = config.debug_mode
print(str.format("Debug Mode is: {0}", app.debug))
app.run()
<commit_msg>Add simple endpoint for form submission<commit_after>
|
import flask
import config
app = flask.Flask(__name__)
@app.route('/')
@app.route('/dashboard')
def render_dashboard():
return flask.render_template('dashboard.html')
@app.route('/new')
def render_new_procedure_form():
return flask.render_template('new_procedure.html')
# Endpoint for new opportunity form submission
@app.route('/opportunity', methods=['POST'])
def new_opportunity():
print(str.format(""))
return flask.redirect(flask.url_for('new', code=201))
# Endpoint for receiving SMS messages from Twilio
@app.route('/sms', methods=['POST'])
def receive_sms():
print(str.format("Received SMS: \n"
"To: {0}\n"
"From: {1}\n"
"Body: {2}\n",
str(flask.request.form['To']),
str(flask.request.form['From']),
str(flask.request.form['Body'])))
return '<Response></Response>'
if __name__ == '__main__':
app.debug = config.debug_mode
print(str.format("Debug Mode is: {0}", app.debug))
app.run()
|
import flask
import config
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('dashboard.html')
@app.route('/new')
def new():
return flask.render_template('new_procedure.html')
@app.route('/sms', methods=['POST'])
def receive_sms():
print(str.format("Received SMS: \n"
"To: {0}\n"
"From: {1}\n"
"Body: {2}\n",
str(flask.request.form['To']),
str(flask.request.form['From']),
str(flask.request.form['Body'])))
return '<Response></Response>'
if __name__ == '__main__':
app.debug = config.debug_mode
print(str.format("Debug Mode is: {0}", app.debug))
app.run()
Add simple endpoint for form submissionimport flask
import config
app = flask.Flask(__name__)
@app.route('/')
@app.route('/dashboard')
def render_dashboard():
return flask.render_template('dashboard.html')
@app.route('/new')
def render_new_procedure_form():
return flask.render_template('new_procedure.html')
# Endpoint for new opportunity form submission
@app.route('/opportunity', methods=['POST'])
def new_opportunity():
print(str.format(""))
return flask.redirect(flask.url_for('new', code=201))
# Endpoint for receiving SMS messages from Twilio
@app.route('/sms', methods=['POST'])
def receive_sms():
print(str.format("Received SMS: \n"
"To: {0}\n"
"From: {1}\n"
"Body: {2}\n",
str(flask.request.form['To']),
str(flask.request.form['From']),
str(flask.request.form['Body'])))
return '<Response></Response>'
if __name__ == '__main__':
app.debug = config.debug_mode
print(str.format("Debug Mode is: {0}", app.debug))
app.run()
|
<commit_before>import flask
import config
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('dashboard.html')
@app.route('/new')
def new():
return flask.render_template('new_procedure.html')
@app.route('/sms', methods=['POST'])
def receive_sms():
print(str.format("Received SMS: \n"
"To: {0}\n"
"From: {1}\n"
"Body: {2}\n",
str(flask.request.form['To']),
str(flask.request.form['From']),
str(flask.request.form['Body'])))
return '<Response></Response>'
if __name__ == '__main__':
app.debug = config.debug_mode
print(str.format("Debug Mode is: {0}", app.debug))
app.run()
<commit_msg>Add simple endpoint for form submission<commit_after>import flask
import config
app = flask.Flask(__name__)
@app.route('/')
@app.route('/dashboard')
def render_dashboard():
return flask.render_template('dashboard.html')
@app.route('/new')
def render_new_procedure_form():
return flask.render_template('new_procedure.html')
# Endpoint for new opportunity form submission
@app.route('/opportunity', methods=['POST'])
def new_opportunity():
print(str.format(""))
return flask.redirect(flask.url_for('new', code=201))
# Endpoint for receiving SMS messages from Twilio
@app.route('/sms', methods=['POST'])
def receive_sms():
print(str.format("Received SMS: \n"
"To: {0}\n"
"From: {1}\n"
"Body: {2}\n",
str(flask.request.form['To']),
str(flask.request.form['From']),
str(flask.request.form['Body'])))
return '<Response></Response>'
if __name__ == '__main__':
app.debug = config.debug_mode
print(str.format("Debug Mode is: {0}", app.debug))
app.run()
|
dbc526d43b6a69c0bd120f3a0abb519f8bf353a8
|
dbimport/csv_util.py
|
dbimport/csv_util.py
|
import csv
import re
def read_csv(path):
with open(path, 'rU') as data:
reader = csv.DictReader(data)
for row in reader:
yield row
def remove_commas_and_apostrophes(value):
"""Remove commas and single quotes from all values in row.
Sqlite can't handle them."""
return re.sub("[,']", '', value)
|
import csv
import re
def read_csv(path):
with open(path, 'rU') as data:
reader = csv.DictReader(data)
for row in reader:
yield row
def remove_commas_and_apostrophes(value):
"""Remove commas and single quotes from all values in row.
Sqlite can't handle them."""
return re.sub("[,']", '', value)
def scrub_row(row):
return {csv_field: remove_commas_and_apostrophes(value)
for csv_field, value in row.items()}
|
Add scrub_row() to pre-process all fields in incoming csv row
|
Add scrub_row() to pre-process all fields in incoming csv row
|
Python
|
mit
|
KatrinaE/importlite
|
import csv
import re
def read_csv(path):
with open(path, 'rU') as data:
reader = csv.DictReader(data)
for row in reader:
yield row
def remove_commas_and_apostrophes(value):
"""Remove commas and single quotes from all values in row.
Sqlite can't handle them."""
return re.sub("[,']", '', value)
Add scrub_row() to pre-process all fields in incoming csv row
|
import csv
import re
def read_csv(path):
with open(path, 'rU') as data:
reader = csv.DictReader(data)
for row in reader:
yield row
def remove_commas_and_apostrophes(value):
"""Remove commas and single quotes from all values in row.
Sqlite can't handle them."""
return re.sub("[,']", '', value)
def scrub_row(row):
return {csv_field: remove_commas_and_apostrophes(value)
for csv_field, value in row.items()}
|
<commit_before>import csv
import re
def read_csv(path):
with open(path, 'rU') as data:
reader = csv.DictReader(data)
for row in reader:
yield row
def remove_commas_and_apostrophes(value):
"""Remove commas and single quotes from all values in row.
Sqlite can't handle them."""
return re.sub("[,']", '', value)
<commit_msg>Add scrub_row() to pre-process all fields in incoming csv row<commit_after>
|
import csv
import re
def read_csv(path):
with open(path, 'rU') as data:
reader = csv.DictReader(data)
for row in reader:
yield row
def remove_commas_and_apostrophes(value):
"""Remove commas and single quotes from all values in row.
Sqlite can't handle them."""
return re.sub("[,']", '', value)
def scrub_row(row):
return {csv_field: remove_commas_and_apostrophes(value)
for csv_field, value in row.items()}
|
import csv
import re
def read_csv(path):
with open(path, 'rU') as data:
reader = csv.DictReader(data)
for row in reader:
yield row
def remove_commas_and_apostrophes(value):
"""Remove commas and single quotes from all values in row.
Sqlite can't handle them."""
return re.sub("[,']", '', value)
Add scrub_row() to pre-process all fields in incoming csv rowimport csv
import re
def read_csv(path):
with open(path, 'rU') as data:
reader = csv.DictReader(data)
for row in reader:
yield row
def remove_commas_and_apostrophes(value):
"""Remove commas and single quotes from all values in row.
Sqlite can't handle them."""
return re.sub("[,']", '', value)
def scrub_row(row):
return {csv_field: remove_commas_and_apostrophes(value)
for csv_field, value in row.items()}
|
<commit_before>import csv
import re
def read_csv(path):
with open(path, 'rU') as data:
reader = csv.DictReader(data)
for row in reader:
yield row
def remove_commas_and_apostrophes(value):
"""Remove commas and single quotes from all values in row.
Sqlite can't handle them."""
return re.sub("[,']", '', value)
<commit_msg>Add scrub_row() to pre-process all fields in incoming csv row<commit_after>import csv
import re
def read_csv(path):
with open(path, 'rU') as data:
reader = csv.DictReader(data)
for row in reader:
yield row
def remove_commas_and_apostrophes(value):
"""Remove commas and single quotes from all values in row.
Sqlite can't handle them."""
return re.sub("[,']", '', value)
def scrub_row(row):
return {csv_field: remove_commas_and_apostrophes(value)
for csv_field, value in row.items()}
|
2ce3b7bb5207fcdbedd731bb9cbc928393654507
|
functional_tests/test_homepage.py
|
functional_tests/test_homepage.py
|
import unittest
from selenium import webdriver
class HomePageRecipeTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.close()
def test_django_working(self):
self.browser.get('http://localhost:8000')
self.assertIn('Django', self.browser.title)
if __name__ == "__main__":
unittest.main()
|
import unittest
from selenium import webdriver
class HomePageRecipeTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.close()
def test_can_see_todays_recipe(self):
# Alice goes to our website
self.browser.get('http://localhost:8000')
# She notices that the title says 'Recept van de dag'
self.assertIn('Recept van de dag', self.browser.title)
# There is also a header on the page that says 'Recept van de dag'
header_text = self.browser.get_element_by_tag_name('h1')
self.assertIn('Recept van de dag', header_text.text)
if __name__ == "__main__":
unittest.main()
|
Add functional test to test the home page
|
Add functional test to test the home page
|
Python
|
agpl-3.0
|
XeryusTC/rotd,XeryusTC/rotd,XeryusTC/rotd
|
import unittest
from selenium import webdriver
class HomePageRecipeTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.close()
def test_django_working(self):
self.browser.get('http://localhost:8000')
self.assertIn('Django', self.browser.title)
if __name__ == "__main__":
unittest.main()
Add functional test to test the home page
|
import unittest
from selenium import webdriver
class HomePageRecipeTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.close()
def test_can_see_todays_recipe(self):
# Alice goes to our website
self.browser.get('http://localhost:8000')
# She notices that the title says 'Recept van de dag'
self.assertIn('Recept van de dag', self.browser.title)
# There is also a header on the page that says 'Recept van de dag'
header_text = self.browser.get_element_by_tag_name('h1')
self.assertIn('Recept van de dag', header_text.text)
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from selenium import webdriver
class HomePageRecipeTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.close()
def test_django_working(self):
self.browser.get('http://localhost:8000')
self.assertIn('Django', self.browser.title)
if __name__ == "__main__":
unittest.main()
<commit_msg>Add functional test to test the home page<commit_after>
|
import unittest
from selenium import webdriver
class HomePageRecipeTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.close()
def test_can_see_todays_recipe(self):
# Alice goes to our website
self.browser.get('http://localhost:8000')
# She notices that the title says 'Recept van de dag'
self.assertIn('Recept van de dag', self.browser.title)
# There is also a header on the page that says 'Recept van de dag'
header_text = self.browser.get_element_by_tag_name('h1')
self.assertIn('Recept van de dag', header_text.text)
if __name__ == "__main__":
unittest.main()
|
import unittest
from selenium import webdriver
class HomePageRecipeTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.close()
def test_django_working(self):
self.browser.get('http://localhost:8000')
self.assertIn('Django', self.browser.title)
if __name__ == "__main__":
unittest.main()
Add functional test to test the home pageimport unittest
from selenium import webdriver
class HomePageRecipeTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.close()
def test_can_see_todays_recipe(self):
# Alice goes to our website
self.browser.get('http://localhost:8000')
# She notices that the title says 'Recept van de dag'
self.assertIn('Recept van de dag', self.browser.title)
# There is also a header on the page that says 'Recept van de dag'
header_text = self.browser.get_element_by_tag_name('h1')
self.assertIn('Recept van de dag', header_text.text)
if __name__ == "__main__":
unittest.main()
|
<commit_before>import unittest
from selenium import webdriver
class HomePageRecipeTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.close()
def test_django_working(self):
self.browser.get('http://localhost:8000')
self.assertIn('Django', self.browser.title)
if __name__ == "__main__":
unittest.main()
<commit_msg>Add functional test to test the home page<commit_after>import unittest
from selenium import webdriver
class HomePageRecipeTests(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.close()
def test_can_see_todays_recipe(self):
# Alice goes to our website
self.browser.get('http://localhost:8000')
# She notices that the title says 'Recept van de dag'
self.assertIn('Recept van de dag', self.browser.title)
# There is also a header on the page that says 'Recept van de dag'
header_text = self.browser.get_element_by_tag_name('h1')
self.assertIn('Recept van de dag', header_text.text)
if __name__ == "__main__":
unittest.main()
|
bad64ce546a98d06497063d6ab55c311fd5ac555
|
src/sentry/web/frontend/project_plugin_configure.py
|
src/sentry/web/frontend/project_plugin_configure.py
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginConfigureView(ProjectView):
required_scope = 'project:write'
def handle(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
if not plugin.can_configure_for_project(project):
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
view = plugin.configure(request, project=project)
if isinstance(view, HttpResponse):
return view
context = {
'page': 'plugin',
'title': plugin.get_title(),
'view': view,
'plugin': plugin,
'plugin_is_enabled': plugin.is_enabled(project),
}
return self.respond('sentry/projects/plugins/configure.html', context)
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginConfigureView(ProjectView):
required_scope = 'project:write'
def handle(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
if not plugin.can_configure_for_project(project):
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
view = plugin.configure(request=request, project=project)
if isinstance(view, HttpResponse):
return view
context = {
'page': 'plugin',
'title': plugin.get_title(),
'view': view,
'plugin': plugin,
'plugin_is_enabled': plugin.is_enabled(project),
}
return self.respond('sentry/projects/plugins/configure.html', context)
|
Use kwargs to fix Plugin2 BS configure ordering
|
Use kwargs to fix Plugin2 BS configure ordering
|
Python
|
bsd-3-clause
|
zenefits/sentry,zenefits/sentry,zenefits/sentry,zenefits/sentry,zenefits/sentry
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginConfigureView(ProjectView):
required_scope = 'project:write'
def handle(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
if not plugin.can_configure_for_project(project):
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
view = plugin.configure(request, project=project)
if isinstance(view, HttpResponse):
return view
context = {
'page': 'plugin',
'title': plugin.get_title(),
'view': view,
'plugin': plugin,
'plugin_is_enabled': plugin.is_enabled(project),
}
return self.respond('sentry/projects/plugins/configure.html', context)
Use kwargs to fix Plugin2 BS configure ordering
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginConfigureView(ProjectView):
required_scope = 'project:write'
def handle(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
if not plugin.can_configure_for_project(project):
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
view = plugin.configure(request=request, project=project)
if isinstance(view, HttpResponse):
return view
context = {
'page': 'plugin',
'title': plugin.get_title(),
'view': view,
'plugin': plugin,
'plugin_is_enabled': plugin.is_enabled(project),
}
return self.respond('sentry/projects/plugins/configure.html', context)
|
<commit_before>from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginConfigureView(ProjectView):
required_scope = 'project:write'
def handle(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
if not plugin.can_configure_for_project(project):
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
view = plugin.configure(request, project=project)
if isinstance(view, HttpResponse):
return view
context = {
'page': 'plugin',
'title': plugin.get_title(),
'view': view,
'plugin': plugin,
'plugin_is_enabled': plugin.is_enabled(project),
}
return self.respond('sentry/projects/plugins/configure.html', context)
<commit_msg>Use kwargs to fix Plugin2 BS configure ordering<commit_after>
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginConfigureView(ProjectView):
required_scope = 'project:write'
def handle(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
if not plugin.can_configure_for_project(project):
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
view = plugin.configure(request=request, project=project)
if isinstance(view, HttpResponse):
return view
context = {
'page': 'plugin',
'title': plugin.get_title(),
'view': view,
'plugin': plugin,
'plugin_is_enabled': plugin.is_enabled(project),
}
return self.respond('sentry/projects/plugins/configure.html', context)
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginConfigureView(ProjectView):
required_scope = 'project:write'
def handle(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
if not plugin.can_configure_for_project(project):
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
view = plugin.configure(request, project=project)
if isinstance(view, HttpResponse):
return view
context = {
'page': 'plugin',
'title': plugin.get_title(),
'view': view,
'plugin': plugin,
'plugin_is_enabled': plugin.is_enabled(project),
}
return self.respond('sentry/projects/plugins/configure.html', context)
Use kwargs to fix Plugin2 BS configure orderingfrom __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginConfigureView(ProjectView):
required_scope = 'project:write'
def handle(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
if not plugin.can_configure_for_project(project):
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
view = plugin.configure(request=request, project=project)
if isinstance(view, HttpResponse):
return view
context = {
'page': 'plugin',
'title': plugin.get_title(),
'view': view,
'plugin': plugin,
'plugin_is_enabled': plugin.is_enabled(project),
}
return self.respond('sentry/projects/plugins/configure.html', context)
|
<commit_before>from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginConfigureView(ProjectView):
required_scope = 'project:write'
def handle(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
if not plugin.can_configure_for_project(project):
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
view = plugin.configure(request, project=project)
if isinstance(view, HttpResponse):
return view
context = {
'page': 'plugin',
'title': plugin.get_title(),
'view': view,
'plugin': plugin,
'plugin_is_enabled': plugin.is_enabled(project),
}
return self.respond('sentry/projects/plugins/configure.html', context)
<commit_msg>Use kwargs to fix Plugin2 BS configure ordering<commit_after>from __future__ import absolute_import
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginConfigureView(ProjectView):
required_scope = 'project:write'
def handle(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
if not plugin.can_configure_for_project(project):
return self.redirect(reverse('sentry-manage-project', args=[project.organization.slug, project.slug]))
view = plugin.configure(request=request, project=project)
if isinstance(view, HttpResponse):
return view
context = {
'page': 'plugin',
'title': plugin.get_title(),
'view': view,
'plugin': plugin,
'plugin_is_enabled': plugin.is_enabled(project),
}
return self.respond('sentry/projects/plugins/configure.html', context)
|
66379ee0118446759fa9709f45406f607245deb2
|
honeybadger/utils.py
|
honeybadger/utils.py
|
import json
class StringReprJSONEncoder(json.JSONEncoder):
def default(self, o):
try:
return repr(o)
except:
return '[unserializable]'
def filter_dict(data, filter_keys):
# filter_keys = set(data.keys())
for key in filter_keys:
if data.has_key(key):
data[key] = "[FILTERED]"
return data
|
import json
class StringReprJSONEncoder(json.JSONEncoder):
def default(self, o):
try:
return repr(o)
except:
return '[unserializable]'
def filter_dict(data, filter_keys):
# filter_keys = set(data.keys())
for key in filter_keys:
if key in data:
data[key] = "[FILTERED]"
return data
|
Remove has_key to ensure python3 compatibility
|
Remove has_key to ensure python3 compatibility
|
Python
|
mit
|
honeybadger-io/honeybadger-python,honeybadger-io/honeybadger-python
|
import json
class StringReprJSONEncoder(json.JSONEncoder):
def default(self, o):
try:
return repr(o)
except:
return '[unserializable]'
def filter_dict(data, filter_keys):
# filter_keys = set(data.keys())
for key in filter_keys:
if data.has_key(key):
data[key] = "[FILTERED]"
return data
Remove has_key to ensure python3 compatibility
|
import json
class StringReprJSONEncoder(json.JSONEncoder):
def default(self, o):
try:
return repr(o)
except:
return '[unserializable]'
def filter_dict(data, filter_keys):
# filter_keys = set(data.keys())
for key in filter_keys:
if key in data:
data[key] = "[FILTERED]"
return data
|
<commit_before>import json
class StringReprJSONEncoder(json.JSONEncoder):
def default(self, o):
try:
return repr(o)
except:
return '[unserializable]'
def filter_dict(data, filter_keys):
# filter_keys = set(data.keys())
for key in filter_keys:
if data.has_key(key):
data[key] = "[FILTERED]"
return data
<commit_msg>Remove has_key to ensure python3 compatibility<commit_after>
|
import json
class StringReprJSONEncoder(json.JSONEncoder):
def default(self, o):
try:
return repr(o)
except:
return '[unserializable]'
def filter_dict(data, filter_keys):
# filter_keys = set(data.keys())
for key in filter_keys:
if key in data:
data[key] = "[FILTERED]"
return data
|
import json
class StringReprJSONEncoder(json.JSONEncoder):
def default(self, o):
try:
return repr(o)
except:
return '[unserializable]'
def filter_dict(data, filter_keys):
# filter_keys = set(data.keys())
for key in filter_keys:
if data.has_key(key):
data[key] = "[FILTERED]"
return data
Remove has_key to ensure python3 compatibilityimport json
class StringReprJSONEncoder(json.JSONEncoder):
def default(self, o):
try:
return repr(o)
except:
return '[unserializable]'
def filter_dict(data, filter_keys):
# filter_keys = set(data.keys())
for key in filter_keys:
if key in data:
data[key] = "[FILTERED]"
return data
|
<commit_before>import json
class StringReprJSONEncoder(json.JSONEncoder):
def default(self, o):
try:
return repr(o)
except:
return '[unserializable]'
def filter_dict(data, filter_keys):
# filter_keys = set(data.keys())
for key in filter_keys:
if data.has_key(key):
data[key] = "[FILTERED]"
return data
<commit_msg>Remove has_key to ensure python3 compatibility<commit_after>import json
class StringReprJSONEncoder(json.JSONEncoder):
def default(self, o):
try:
return repr(o)
except:
return '[unserializable]'
def filter_dict(data, filter_keys):
# filter_keys = set(data.keys())
for key in filter_keys:
if key in data:
data[key] = "[FILTERED]"
return data
|
4298cb6ccaac055a4a8db250dc6143b37870edd6
|
openacademy/model/openacademy_session.py
|
openacademy/model/openacademy_session.py
|
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|', ('instructor', '=', True),
('category_id.name', 'ilike', "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
Add domain or and ilike
|
[REF] openacademy: Add domain or and ilike
|
Python
|
apache-2.0
|
Hiregui92/openacademy-project
|
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
[REF] openacademy: Add domain or and ilike
|
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|', ('instructor', '=', True),
('category_id.name', 'ilike', "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
<commit_before>from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
<commit_msg>[REF] openacademy: Add domain or and ilike<commit_after>
|
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|', ('instructor', '=', True),
('category_id.name', 'ilike', "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
[REF] openacademy: Add domain or and ilikefrom openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|', ('instructor', '=', True),
('category_id.name', 'ilike', "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
<commit_before>from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
<commit_msg>[REF] openacademy: Add domain or and ilike<commit_after>from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="Instructor",
domain=['|', ('instructor', '=', True),
('category_id.name', 'ilike', "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
e68cb906810a26d93e0d15e0357a75a2b49d8784
|
boundary/plugin_get_components.py
|
boundary/plugin_get_components.py
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGetComponents (ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path="v1/plugins"
self.pluginName = None
def addArguments(self):
ApiCli.addArguments(self)
self.parser.add_argument('-n', '--plugin-Name', dest='pluginName',action='store',required=True,help='Plugin name')
def getArguments(self):
'''
Extracts the specific arguments of this CLI
'''
ApiCli.getArguments(self)
if self.args.pluginName != None:
self.pluginName = self.args.pluginName
self.path = "v1/plugins/{0}/components".format(self.pluginName)
def getDescription(self):
return "Get the components of a plugin in a Boundary account"
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGetComponents(ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path = "v1/plugins"
self.pluginName = None
def addArguments(self):
ApiCli.addArguments(self)
self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
required=True, help='Plugin name')
def getArguments(self):
"""
Extracts the specific arguments of this CLI
"""
ApiCli.getArguments(self)
if self.args.pluginName is not None:
self.pluginName = self.args.pluginName
self.path = "v1/plugins/{0}/components".format(self.pluginName)
def getDescription(self):
return "Get the components of a plugin in a Boundary account"
|
Reformat code to PEP-8 standards
|
Reformat code to PEP-8 standards
|
Python
|
apache-2.0
|
jdgwartney/boundary-api-cli,boundary/pulse-api-cli,wcainboundary/boundary-api-cli,jdgwartney/pulse-api-cli,jdgwartney/boundary-api-cli,boundary/boundary-api-cli,boundary/pulse-api-cli,boundary/boundary-api-cli,wcainboundary/boundary-api-cli,jdgwartney/pulse-api-cli
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGetComponents (ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path="v1/plugins"
self.pluginName = None
def addArguments(self):
ApiCli.addArguments(self)
self.parser.add_argument('-n', '--plugin-Name', dest='pluginName',action='store',required=True,help='Plugin name')
def getArguments(self):
'''
Extracts the specific arguments of this CLI
'''
ApiCli.getArguments(self)
if self.args.pluginName != None:
self.pluginName = self.args.pluginName
self.path = "v1/plugins/{0}/components".format(self.pluginName)
def getDescription(self):
return "Get the components of a plugin in a Boundary account"
Reformat code to PEP-8 standards
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGetComponents(ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path = "v1/plugins"
self.pluginName = None
def addArguments(self):
ApiCli.addArguments(self)
self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
required=True, help='Plugin name')
def getArguments(self):
"""
Extracts the specific arguments of this CLI
"""
ApiCli.getArguments(self)
if self.args.pluginName is not None:
self.pluginName = self.args.pluginName
self.path = "v1/plugins/{0}/components".format(self.pluginName)
def getDescription(self):
return "Get the components of a plugin in a Boundary account"
|
<commit_before>#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGetComponents (ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path="v1/plugins"
self.pluginName = None
def addArguments(self):
ApiCli.addArguments(self)
self.parser.add_argument('-n', '--plugin-Name', dest='pluginName',action='store',required=True,help='Plugin name')
def getArguments(self):
'''
Extracts the specific arguments of this CLI
'''
ApiCli.getArguments(self)
if self.args.pluginName != None:
self.pluginName = self.args.pluginName
self.path = "v1/plugins/{0}/components".format(self.pluginName)
def getDescription(self):
return "Get the components of a plugin in a Boundary account"
<commit_msg>Reformat code to PEP-8 standards<commit_after>
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGetComponents(ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path = "v1/plugins"
self.pluginName = None
def addArguments(self):
ApiCli.addArguments(self)
self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
required=True, help='Plugin name')
def getArguments(self):
"""
Extracts the specific arguments of this CLI
"""
ApiCli.getArguments(self)
if self.args.pluginName is not None:
self.pluginName = self.args.pluginName
self.path = "v1/plugins/{0}/components".format(self.pluginName)
def getDescription(self):
return "Get the components of a plugin in a Boundary account"
|
#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGetComponents (ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path="v1/plugins"
self.pluginName = None
def addArguments(self):
ApiCli.addArguments(self)
self.parser.add_argument('-n', '--plugin-Name', dest='pluginName',action='store',required=True,help='Plugin name')
def getArguments(self):
'''
Extracts the specific arguments of this CLI
'''
ApiCli.getArguments(self)
if self.args.pluginName != None:
self.pluginName = self.args.pluginName
self.path = "v1/plugins/{0}/components".format(self.pluginName)
def getDescription(self):
return "Get the components of a plugin in a Boundary account"
Reformat code to PEP-8 standards#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGetComponents(ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path = "v1/plugins"
self.pluginName = None
def addArguments(self):
ApiCli.addArguments(self)
self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
required=True, help='Plugin name')
def getArguments(self):
"""
Extracts the specific arguments of this CLI
"""
ApiCli.getArguments(self)
if self.args.pluginName is not None:
self.pluginName = self.args.pluginName
self.path = "v1/plugins/{0}/components".format(self.pluginName)
def getDescription(self):
return "Get the components of a plugin in a Boundary account"
|
<commit_before>#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGetComponents (ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path="v1/plugins"
self.pluginName = None
def addArguments(self):
ApiCli.addArguments(self)
self.parser.add_argument('-n', '--plugin-Name', dest='pluginName',action='store',required=True,help='Plugin name')
def getArguments(self):
'''
Extracts the specific arguments of this CLI
'''
ApiCli.getArguments(self)
if self.args.pluginName != None:
self.pluginName = self.args.pluginName
self.path = "v1/plugins/{0}/components".format(self.pluginName)
def getDescription(self):
return "Get the components of a plugin in a Boundary account"
<commit_msg>Reformat code to PEP-8 standards<commit_after>#
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from boundary import ApiCli
class PluginGetComponents(ApiCli):
def __init__(self):
ApiCli.__init__(self)
self.method = "GET"
self.path = "v1/plugins"
self.pluginName = None
def addArguments(self):
ApiCli.addArguments(self)
self.parser.add_argument('-n', '--plugin-Name', dest='pluginName', action='store', metavar='plugin_name',
required=True, help='Plugin name')
def getArguments(self):
"""
Extracts the specific arguments of this CLI
"""
ApiCli.getArguments(self)
if self.args.pluginName is not None:
self.pluginName = self.args.pluginName
self.path = "v1/plugins/{0}/components".format(self.pluginName)
def getDescription(self):
return "Get the components of a plugin in a Boundary account"
|
e907ec909b6dc3eecc83b634a48a04b53f1740a7
|
nhlstats/__init__.py
|
nhlstats/__init__.py
|
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
|
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True, beginning=None, end=None):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games. beginning and end allow you set a range
for the search, with no end indicating until the time.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
|
Add beginning and end args to GetGames
|
Add beginning and end args to GetGames
|
Python
|
mit
|
fancystats/nhlstats
|
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
Add beginning and end args to GetGames
|
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True, beginning=None, end=None):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games. beginning and end allow you set a range
for the search, with no end indicating until the time.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
|
<commit_before>
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
<commit_msg>Add beginning and end args to GetGames<commit_after>
|
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True, beginning=None, end=None):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games. beginning and end allow you set a range
for the search, with no end indicating until the time.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
|
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
Add beginning and end args to GetGames
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True, beginning=None, end=None):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games. beginning and end allow you set a range
for the search, with no end indicating until the time.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
|
<commit_before>
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
<commit_msg>Add beginning and end args to GetGames<commit_after>
import logging
from version import __version__
logger = logging.getLogger(__name__)
logger.debug('Loading %s ver %s' % (__name__, __version__))
# Actions represents the available textual items that can be passed to main
# to drive dispatch. These should be all lower case, no spaces or underscores.
actions = [
'collect',
'update',
'testignore', # Allows the bin app to be run without calling into here.
]
def GetDataForGame(game):
pass
def GetDataForGames(games=[]):
for game in games:
GetDataForGame(game)
def GetGames(active=True, beginning=None, end=None):
"""
Return a tuple of games. Updates gets finished games to check for updated stats,
if False (default) it returns active games. beginning and end allow you set a range
for the search, with no end indicating until the time.
"""
def main(action='collect'):
"""
The main entry point for the application
"""
logger.debug('Dispatching action %s' % action)
# By default, we collect info on current games
if action == 'collect':
GetDataForGames(GetGames(active=True))
# Otherwise we can look to update finished games
elif action == 'update':
GetDataForGames(GetGames(active=False))
elif action in actions:
raise NotImplementedError('Action "%s" is known, but not (yet?) implemented' % action)
else:
raise ValueError('Unknown action "%s"' % action)
|
f7341acf0717d238073a688c6047e18b524efab1
|
qmpy/configuration/resources/__init__.py
|
qmpy/configuration/resources/__init__.py
|
import yaml
import os, os.path
loc = os.path.dirname(os.path.abspath(__file__))
hosts = yaml.load(open(loc+'/hosts.yml'))
projects = yaml.load(open(loc+'/projects.yml'))
allocations = yaml.load(open(loc+'/allocations.yml'))
users = yaml.load(open(loc+'/users.yml'))
|
import yaml
import os
loc = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(loc, 'hosts.yml'), 'r') as fr:
hosts = yaml.load(fr)
with open(os.path.join(loc, 'projects.yml'), 'r') as fr:
projects = yaml.load(fr)
with open(os.path.join(loc, 'allocations.yml'), 'r') as fr:
allocations = yaml.load(fr)
with open(os.path.join(loc, 'users.yml'), 'r') as fr:
users = yaml.load(fr)
|
Use OS-agnostic path joining operations
|
Use OS-agnostic path joining operations
|
Python
|
mit
|
wolverton-research-group/qmpy,wolverton-research-group/qmpy,wolverton-research-group/qmpy,wolverton-research-group/qmpy,wolverton-research-group/qmpy
|
import yaml
import os, os.path
loc = os.path.dirname(os.path.abspath(__file__))
hosts = yaml.load(open(loc+'/hosts.yml'))
projects = yaml.load(open(loc+'/projects.yml'))
allocations = yaml.load(open(loc+'/allocations.yml'))
users = yaml.load(open(loc+'/users.yml'))
Use OS-agnostic path joining operations
|
import yaml
import os
loc = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(loc, 'hosts.yml'), 'r') as fr:
hosts = yaml.load(fr)
with open(os.path.join(loc, 'projects.yml'), 'r') as fr:
projects = yaml.load(fr)
with open(os.path.join(loc, 'allocations.yml'), 'r') as fr:
allocations = yaml.load(fr)
with open(os.path.join(loc, 'users.yml'), 'r') as fr:
users = yaml.load(fr)
|
<commit_before>import yaml
import os, os.path
loc = os.path.dirname(os.path.abspath(__file__))
hosts = yaml.load(open(loc+'/hosts.yml'))
projects = yaml.load(open(loc+'/projects.yml'))
allocations = yaml.load(open(loc+'/allocations.yml'))
users = yaml.load(open(loc+'/users.yml'))
<commit_msg>Use OS-agnostic path joining operations<commit_after>
|
import yaml
import os
loc = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(loc, 'hosts.yml'), 'r') as fr:
hosts = yaml.load(fr)
with open(os.path.join(loc, 'projects.yml'), 'r') as fr:
projects = yaml.load(fr)
with open(os.path.join(loc, 'allocations.yml'), 'r') as fr:
allocations = yaml.load(fr)
with open(os.path.join(loc, 'users.yml'), 'r') as fr:
users = yaml.load(fr)
|
import yaml
import os, os.path
loc = os.path.dirname(os.path.abspath(__file__))
hosts = yaml.load(open(loc+'/hosts.yml'))
projects = yaml.load(open(loc+'/projects.yml'))
allocations = yaml.load(open(loc+'/allocations.yml'))
users = yaml.load(open(loc+'/users.yml'))
Use OS-agnostic path joining operationsimport yaml
import os
loc = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(loc, 'hosts.yml'), 'r') as fr:
hosts = yaml.load(fr)
with open(os.path.join(loc, 'projects.yml'), 'r') as fr:
projects = yaml.load(fr)
with open(os.path.join(loc, 'allocations.yml'), 'r') as fr:
allocations = yaml.load(fr)
with open(os.path.join(loc, 'users.yml'), 'r') as fr:
users = yaml.load(fr)
|
<commit_before>import yaml
import os, os.path
loc = os.path.dirname(os.path.abspath(__file__))
hosts = yaml.load(open(loc+'/hosts.yml'))
projects = yaml.load(open(loc+'/projects.yml'))
allocations = yaml.load(open(loc+'/allocations.yml'))
users = yaml.load(open(loc+'/users.yml'))
<commit_msg>Use OS-agnostic path joining operations<commit_after>import yaml
import os
loc = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(loc, 'hosts.yml'), 'r') as fr:
hosts = yaml.load(fr)
with open(os.path.join(loc, 'projects.yml'), 'r') as fr:
projects = yaml.load(fr)
with open(os.path.join(loc, 'allocations.yml'), 'r') as fr:
allocations = yaml.load(fr)
with open(os.path.join(loc, 'users.yml'), 'r') as fr:
users = yaml.load(fr)
|
6ca50d8b5c208a2063910832df5d9a07301b6893
|
homeassistant/components/device_tracker/owntracks.py
|
homeassistant/components/device_tracker/owntracks.py
|
"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
device_tracker:
platform: owntracks
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
|
"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.owntracks.html
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
|
Move configuration details to docs
|
Move configuration details to docs
|
Python
|
mit
|
MungoRae/home-assistant,varunr047/homefile,LinuxChristian/home-assistant,srcLurker/home-assistant,molobrakos/home-assistant,sffjunkie/home-assistant,sfam/home-assistant,Zac-HD/home-assistant,tboyce021/home-assistant,aoakeson/home-assistant,caiuspb/home-assistant,Zyell/home-assistant,bdfoster/blumate,nnic/home-assistant,happyleavesaoc/home-assistant,balloob/home-assistant,jamespcole/home-assistant,hmronline/home-assistant,leppa/home-assistant,hexxter/home-assistant,fbradyirl/home-assistant,robbiet480/home-assistant,ma314smith/home-assistant,MungoRae/home-assistant,shaftoe/home-assistant,morphis/home-assistant,betrisey/home-assistant,HydrelioxGitHub/home-assistant,sffjunkie/home-assistant,tchellomello/home-assistant,joopert/home-assistant,coteyr/home-assistant,hmronline/home-assistant,pottzer/home-assistant,adrienbrault/home-assistant,tboyce1/home-assistant,auduny/home-assistant,shaftoe/home-assistant,philipbl/home-assistant,eagleamon/home-assistant,ma314smith/home-assistant,dmeulen/home-assistant,tchellomello/home-assistant,nkgilley/home-assistant,morphis/home-assistant,oandrew/home-assistant,srcLurker/home-assistant,PetePriority/home-assistant,pschmitt/home-assistant,jamespcole/home-assistant,stefan-jonasson/home-assistant,miniconfig/home-assistant,nnic/home-assistant,aequitas/home-assistant,instantchow/home-assistant,ewandor/home-assistant,pottzer/home-assistant,Smart-Torvy/torvy-home-assistant,LinuxChristian/home-assistant,aequitas/home-assistant,keerts/home-assistant,philipbl/home-assistant,oandrew/home-assistant,Smart-Torvy/torvy-home-assistant,kyvinh/home-assistant,deisi/home-assistant,soldag/home-assistant,srcLurker/home-assistant,hexxter/home-assistant,GenericStudent/home-assistant,mKeRix/home-assistant,jawilson/home-assistant,Duoxilian/home-assistant,oandrew/home-assistant,nevercast/home-assistant,hexxter/home-assistant,sander76/home-assistant,Theb-1/home-assistant,varunr047/homefile,xifle/home-assistant,qedi-r/home-assistant,mikaelboman/home-assistant,keert
s/home-assistant,Smart-Torvy/torvy-home-assistant,florianholzapfel/home-assistant,morphis/home-assistant,kyvinh/home-assistant,nugget/home-assistant,tinloaf/home-assistant,rohitranjan1991/home-assistant,bdfoster/blumate,philipbl/home-assistant,MungoRae/home-assistant,philipbl/home-assistant,DavidLP/home-assistant,coteyr/home-assistant,sdague/home-assistant,stefan-jonasson/home-assistant,instantchow/home-assistant,Cinntax/home-assistant,qedi-r/home-assistant,Theb-1/home-assistant,justyns/home-assistant,open-homeautomation/home-assistant,Theb-1/home-assistant,balloob/home-assistant,auduny/home-assistant,luxus/home-assistant,persandstrom/home-assistant,lukas-hetzenecker/home-assistant,caiuspb/home-assistant,mKeRix/home-assistant,MartinHjelmare/home-assistant,ma314smith/home-assistant,PetePriority/home-assistant,sander76/home-assistant,fbradyirl/home-assistant,luxus/home-assistant,jaharkes/home-assistant,partofthething/home-assistant,auduny/home-assistant,open-homeautomation/home-assistant,keerts/home-assistant,pottzer/home-assistant,tinloaf/home-assistant,turbokongen/home-assistant,dmeulen/home-assistant,sffjunkie/home-assistant,happyleavesaoc/home-assistant,JshWright/home-assistant,MartinHjelmare/home-assistant,toddeye/home-assistant,miniconfig/home-assistant,ma314smith/home-assistant,jnewland/home-assistant,mKeRix/home-assistant,robjohnson189/home-assistant,badele/home-assistant,devdelay/home-assistant,tboyce1/home-assistant,stefan-jonasson/home-assistant,xifle/home-assistant,aronsky/home-assistant,emilhetty/home-assistant,home-assistant/home-assistant,devdelay/home-assistant,mikaelboman/home-assistant,jabesq/home-assistant,deisi/home-assistant,sffjunkie/home-assistant,ct-23/home-assistant,ct-23/home-assistant,nevercast/home-assistant,kennedyshead/home-assistant,joopert/home-assistant,keerts/home-assistant,titilambert/home-assistant,srcLurker/home-assistant,titilambert/home-assistant,aronsky/home-assistant,alexmogavero/home-assistant,PetePriority/home-assistant,jnewl
and/home-assistant,jaharkes/home-assistant,Julian/home-assistant,oandrew/home-assistant,sdague/home-assistant,florianholzapfel/home-assistant,mikaelboman/home-assistant,stefan-jonasson/home-assistant,happyleavesaoc/home-assistant,justyns/home-assistant,MungoRae/home-assistant,Teagan42/home-assistant,leoc/home-assistant,jabesq/home-assistant,sfam/home-assistant,ct-23/home-assistant,jamespcole/home-assistant,nugget/home-assistant,kennedyshead/home-assistant,bdfoster/blumate,emilhetty/home-assistant,emilhetty/home-assistant,home-assistant/home-assistant,open-homeautomation/home-assistant,jnewland/home-assistant,fbradyirl/home-assistant,deisi/home-assistant,xifle/home-assistant,hmronline/home-assistant,Zyell/home-assistant,molobrakos/home-assistant,hexxter/home-assistant,Cinntax/home-assistant,alexmogavero/home-assistant,Julian/home-assistant,tboyce021/home-assistant,leppa/home-assistant,shaftoe/home-assistant,tboyce1/home-assistant,mezz64/home-assistant,Duoxilian/home-assistant,ct-23/home-assistant,leoc/home-assistant,nevercast/home-assistant,emilhetty/home-assistant,w1ll1am23/home-assistant,dmeulen/home-assistant,mKeRix/home-assistant,sffjunkie/home-assistant,DavidLP/home-assistant,caiuspb/home-assistant,mezz64/home-assistant,Zac-HD/home-assistant,betrisey/home-assistant,ct-23/home-assistant,ewandor/home-assistant,coteyr/home-assistant,open-homeautomation/home-assistant,molobrakos/home-assistant,ewandor/home-assistant,varunr047/homefile,turbokongen/home-assistant,nugget/home-assistant,nkgilley/home-assistant,jawilson/home-assistant,Danielhiversen/home-assistant,Zac-HD/home-assistant,eagleamon/home-assistant,happyleavesaoc/home-assistant,deisi/home-assistant,kyvinh/home-assistant,nnic/home-assistant,aequitas/home-assistant,instantchow/home-assistant,Zac-HD/home-assistant,robjohnson189/home-assistant,kyvinh/home-assistant,emilhetty/home-assistant,varunr047/homefile,Zyell/home-assistant,Julian/home-assistant,partofthething/home-assistant,xifle/home-assistant,alexmogavero
/home-assistant,robjohnson189/home-assistant,robbiet480/home-assistant,leoc/home-assistant,JshWright/home-assistant,JshWright/home-assistant,eagleamon/home-assistant,eagleamon/home-assistant,w1ll1am23/home-assistant,robjohnson189/home-assistant,JshWright/home-assistant,miniconfig/home-assistant,bdfoster/blumate,miniconfig/home-assistant,betrisey/home-assistant,DavidLP/home-assistant,Teagan42/home-assistant,tboyce1/home-assistant,HydrelioxGitHub/home-assistant,LinuxChristian/home-assistant,tinloaf/home-assistant,florianholzapfel/home-assistant,varunr047/homefile,shaftoe/home-assistant,Smart-Torvy/torvy-home-assistant,leoc/home-assistant,balloob/home-assistant,badele/home-assistant,Duoxilian/home-assistant,GenericStudent/home-assistant,florianholzapfel/home-assistant,MungoRae/home-assistant,bdfoster/blumate,postlund/home-assistant,Julian/home-assistant,deisi/home-assistant,jaharkes/home-assistant,betrisey/home-assistant,aoakeson/home-assistant,persandstrom/home-assistant,adrienbrault/home-assistant,LinuxChristian/home-assistant,devdelay/home-assistant,devdelay/home-assistant,Duoxilian/home-assistant,soldag/home-assistant,alexmogavero/home-assistant,rohitranjan1991/home-assistant,jaharkes/home-assistant,hmronline/home-assistant,FreekingDean/home-assistant,FreekingDean/home-assistant,mikaelboman/home-assistant,rohitranjan1991/home-assistant,dmeulen/home-assistant,hmronline/home-assistant,MartinHjelmare/home-assistant,postlund/home-assistant,persandstrom/home-assistant,luxus/home-assistant,Danielhiversen/home-assistant,justyns/home-assistant,aoakeson/home-assistant,morphis/home-assistant,badele/home-assistant,jabesq/home-assistant,LinuxChristian/home-assistant,mikaelboman/home-assistant,lukas-hetzenecker/home-assistant,toddeye/home-assistant,HydrelioxGitHub/home-assistant,sfam/home-assistant,pschmitt/home-assistant
|
"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
device_tracker:
platform: owntracks
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
Move configuration details to docs
|
"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.owntracks.html
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
|
<commit_before>"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
device_tracker:
platform: owntracks
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
<commit_msg>Move configuration details to docs<commit_after>
|
"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.owntracks.html
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
|
"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
device_tracker:
platform: owntracks
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
Move configuration details to docs"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.owntracks.html
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
|
<commit_before>"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
device_tracker:
platform: owntracks
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
<commit_msg>Move configuration details to docs<commit_after>"""
homeassistant.components.device_tracker.owntracks
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
OwnTracks platform for the device tracker.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.owntracks.html
"""
import json
import logging
import homeassistant.components.mqtt as mqtt
DEPENDENCIES = ['mqtt']
LOCATION_TOPIC = 'owntracks/+/+'
def setup_scanner(hass, config, see):
""" Set up a OwnTracksks tracker. """
def owntracks_location_update(topic, payload, qos):
""" MQTT message received. """
# Docs on available data:
# http://owntracks.org/booklet/tech/json/#_typelocation
try:
data = json.loads(payload)
except ValueError:
# If invalid JSON
logging.getLogger(__name__).error(
'Unable to parse payload as JSON: %s', payload)
return
if not isinstance(data, dict) or data.get('_type') != 'location':
return
parts = topic.split('/')
kwargs = {
'dev_id': '{}_{}'.format(parts[1], parts[2]),
'host_name': parts[1],
'gps': (data['lat'], data['lon']),
}
if 'acc' in data:
kwargs['gps_accuracy'] = data['acc']
if 'batt' in data:
kwargs['battery'] = data['batt']
see(**kwargs)
mqtt.subscribe(hass, LOCATION_TOPIC, owntracks_location_update, 1)
return True
|
19783c164910cec1d0d9e68a3ce62cb8874fa605
|
salt/ext/__init__.py
|
salt/ext/__init__.py
|
# coding: utf-8 -*-
|
# coding: utf-8 -*-
'''
This directory contains external modules shipping with Salt. They are governed
under their respective licenses. See the COPYING file included with this
distribution for more information.
'''
|
Add a note explaining salt/ext
|
Add a note explaining salt/ext
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# coding: utf-8 -*-
Add a note explaining salt/ext
|
# coding: utf-8 -*-
'''
This directory contains external modules shipping with Salt. They are governed
under their respective licenses. See the COPYING file included with this
distribution for more information.
'''
|
<commit_before># coding: utf-8 -*-
<commit_msg>Add a note explaining salt/ext<commit_after>
|
# coding: utf-8 -*-
'''
This directory contains external modules shipping with Salt. They are governed
under their respective licenses. See the COPYING file included with this
distribution for more information.
'''
|
# coding: utf-8 -*-
Add a note explaining salt/ext# coding: utf-8 -*-
'''
This directory contains external modules shipping with Salt. They are governed
under their respective licenses. See the COPYING file included with this
distribution for more information.
'''
|
<commit_before># coding: utf-8 -*-
<commit_msg>Add a note explaining salt/ext<commit_after># coding: utf-8 -*-
'''
This directory contains external modules shipping with Salt. They are governed
under their respective licenses. See the COPYING file included with this
distribution for more information.
'''
|
3cc2dd83b44979c2dee3946e3d01ca236d3339ff
|
src/trusted/service_runtime/linux/nacl_bootstrap_munge_phdr.py
|
src/trusted/service_runtime/linux/nacl_bootstrap_munge_phdr.py
|
#!/usr/bin/python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# This takes three command-line arguments:
# MUNGE-PHDR-PROGRAM file name of program built from
# nacl_helper_bootstrap_munge_phdr.c
# INFILE raw linked ELF file name
# OUTFILE output file name
#
# We just run the MUNGE-PHDR-PROGRAM on a copy of INFILE.
# That modifies the file in place. Then we move it to OUTFILE.
#
# We only have this wrapper script because nacl_helper_bootstrap_munge_phdr.c
# wants to modify a file in place (and it would be a much longer and more
# fragile program if it created a fresh ELF output file instead).
import shutil
import subprocess
import sys
def Main(argv):
if len(argv) != 4:
print 'Usage: %s MUNGE-PHDR-PROGRAM INFILE OUTFILE' % argv[0]
sys.exit(1)
[prog, munger, infile, outfile] = argv
tmpfile = outfile + '.tmp'
shutil.copy(infile, tmpfile)
segment_num = '2'
subprocess.check_call([munger, tmpfile, segment_num])
shutil.move(tmpfile, outfile)
if __name__ == '__main__':
Main(sys.argv)
|
#!/usr/bin/env python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This takes three command-line arguments:
MUNGE-PHDR-PROGRAM file name of program built from
nacl_helper_bootstrap_munge_phdr.c
INFILE raw linked ELF file name
OUTFILE output file name
We just run the MUNGE-PHDR-PROGRAM on a copy of INFILE.
That modifies the file in place. Then we move it to OUTFILE.
We only have this wrapper script because nacl_helper_bootstrap_munge_phdr.c
wants to modify a file in place (and it would be a much longer and more
fragile program if it created a fresh ELF output file instead).
"""
import shutil
import subprocess
import sys
def Main(argv):
if len(argv) != 4:
print 'Usage: %s MUNGE-PHDR-PROGRAM INFILE OUTFILE' % argv[0]
return 1
[prog, munger, infile, outfile] = argv
tmpfile = outfile + '.tmp'
shutil.copy(infile, tmpfile)
segment_num = '2'
subprocess.check_call([munger, tmpfile, segment_num])
shutil.move(tmpfile, outfile)
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
Move over tweaks to munge-phdr script from chromium repo
|
Move over tweaks to munge-phdr script from chromium repo
Some cosmetic changes were made on the chromium side since we copied it.
Catch up to those, still preparing to remove the chromium copy ASAP.
BUG= none
TEST= trybots
R=bradchen@google.com
Review URL: http://codereview.chromium.org/8728008
git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@7312 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2
|
Python
|
bsd-3-clause
|
nacl-webkit/native_client,sbc100/native_client,sbc100/native_client,nacl-webkit/native_client,nacl-webkit/native_client,sbc100/native_client,sbc100/native_client,sbc100/native_client,nacl-webkit/native_client,sbc100/native_client,nacl-webkit/native_client
|
#!/usr/bin/python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# This takes three command-line arguments:
# MUNGE-PHDR-PROGRAM file name of program built from
# nacl_helper_bootstrap_munge_phdr.c
# INFILE raw linked ELF file name
# OUTFILE output file name
#
# We just run the MUNGE-PHDR-PROGRAM on a copy of INFILE.
# That modifies the file in place. Then we move it to OUTFILE.
#
# We only have this wrapper script because nacl_helper_bootstrap_munge_phdr.c
# wants to modify a file in place (and it would be a much longer and more
# fragile program if it created a fresh ELF output file instead).
import shutil
import subprocess
import sys
def Main(argv):
if len(argv) != 4:
print 'Usage: %s MUNGE-PHDR-PROGRAM INFILE OUTFILE' % argv[0]
sys.exit(1)
[prog, munger, infile, outfile] = argv
tmpfile = outfile + '.tmp'
shutil.copy(infile, tmpfile)
segment_num = '2'
subprocess.check_call([munger, tmpfile, segment_num])
shutil.move(tmpfile, outfile)
if __name__ == '__main__':
Main(sys.argv)
Move over tweaks to munge-phdr script from chromium repo
Some cosmetic changes were made on the chromium side since we copied it.
Catch up to those, still preparing to remove the chromium copy ASAP.
BUG= none
TEST= trybots
R=bradchen@google.com
Review URL: http://codereview.chromium.org/8728008
git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@7312 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2
|
#!/usr/bin/env python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This takes three command-line arguments:
MUNGE-PHDR-PROGRAM file name of program built from
nacl_helper_bootstrap_munge_phdr.c
INFILE raw linked ELF file name
OUTFILE output file name
We just run the MUNGE-PHDR-PROGRAM on a copy of INFILE.
That modifies the file in place. Then we move it to OUTFILE.
We only have this wrapper script because nacl_helper_bootstrap_munge_phdr.c
wants to modify a file in place (and it would be a much longer and more
fragile program if it created a fresh ELF output file instead).
"""
import shutil
import subprocess
import sys
def Main(argv):
if len(argv) != 4:
print 'Usage: %s MUNGE-PHDR-PROGRAM INFILE OUTFILE' % argv[0]
return 1
[prog, munger, infile, outfile] = argv
tmpfile = outfile + '.tmp'
shutil.copy(infile, tmpfile)
segment_num = '2'
subprocess.check_call([munger, tmpfile, segment_num])
shutil.move(tmpfile, outfile)
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# This takes three command-line arguments:
# MUNGE-PHDR-PROGRAM file name of program built from
# nacl_helper_bootstrap_munge_phdr.c
# INFILE raw linked ELF file name
# OUTFILE output file name
#
# We just run the MUNGE-PHDR-PROGRAM on a copy of INFILE.
# That modifies the file in place. Then we move it to OUTFILE.
#
# We only have this wrapper script because nacl_helper_bootstrap_munge_phdr.c
# wants to modify a file in place (and it would be a much longer and more
# fragile program if it created a fresh ELF output file instead).
import shutil
import subprocess
import sys
def Main(argv):
if len(argv) != 4:
print 'Usage: %s MUNGE-PHDR-PROGRAM INFILE OUTFILE' % argv[0]
sys.exit(1)
[prog, munger, infile, outfile] = argv
tmpfile = outfile + '.tmp'
shutil.copy(infile, tmpfile)
segment_num = '2'
subprocess.check_call([munger, tmpfile, segment_num])
shutil.move(tmpfile, outfile)
if __name__ == '__main__':
Main(sys.argv)
<commit_msg>Move over tweaks to munge-phdr script from chromium repo
Some cosmetic changes were made on the chromium side since we copied it.
Catch up to those, still preparing to remove the chromium copy ASAP.
BUG= none
TEST= trybots
R=bradchen@google.com
Review URL: http://codereview.chromium.org/8728008
git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@7312 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This takes three command-line arguments:
MUNGE-PHDR-PROGRAM file name of program built from
nacl_helper_bootstrap_munge_phdr.c
INFILE raw linked ELF file name
OUTFILE output file name
We just run the MUNGE-PHDR-PROGRAM on a copy of INFILE.
That modifies the file in place. Then we move it to OUTFILE.
We only have this wrapper script because nacl_helper_bootstrap_munge_phdr.c
wants to modify a file in place (and it would be a much longer and more
fragile program if it created a fresh ELF output file instead).
"""
import shutil
import subprocess
import sys
def Main(argv):
if len(argv) != 4:
print 'Usage: %s MUNGE-PHDR-PROGRAM INFILE OUTFILE' % argv[0]
return 1
[prog, munger, infile, outfile] = argv
tmpfile = outfile + '.tmp'
shutil.copy(infile, tmpfile)
segment_num = '2'
subprocess.check_call([munger, tmpfile, segment_num])
shutil.move(tmpfile, outfile)
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
#!/usr/bin/python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# This takes three command-line arguments:
# MUNGE-PHDR-PROGRAM file name of program built from
# nacl_helper_bootstrap_munge_phdr.c
# INFILE raw linked ELF file name
# OUTFILE output file name
#
# We just run the MUNGE-PHDR-PROGRAM on a copy of INFILE.
# That modifies the file in place. Then we move it to OUTFILE.
#
# We only have this wrapper script because nacl_helper_bootstrap_munge_phdr.c
# wants to modify a file in place (and it would be a much longer and more
# fragile program if it created a fresh ELF output file instead).
import shutil
import subprocess
import sys
def Main(argv):
if len(argv) != 4:
print 'Usage: %s MUNGE-PHDR-PROGRAM INFILE OUTFILE' % argv[0]
sys.exit(1)
[prog, munger, infile, outfile] = argv
tmpfile = outfile + '.tmp'
shutil.copy(infile, tmpfile)
segment_num = '2'
subprocess.check_call([munger, tmpfile, segment_num])
shutil.move(tmpfile, outfile)
if __name__ == '__main__':
Main(sys.argv)
Move over tweaks to munge-phdr script from chromium repo
Some cosmetic changes were made on the chromium side since we copied it.
Catch up to those, still preparing to remove the chromium copy ASAP.
BUG= none
TEST= trybots
R=bradchen@google.com
Review URL: http://codereview.chromium.org/8728008
git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@7312 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2#!/usr/bin/env python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This takes three command-line arguments:
MUNGE-PHDR-PROGRAM file name of program built from
nacl_helper_bootstrap_munge_phdr.c
INFILE raw linked ELF file name
OUTFILE output file name
We just run the MUNGE-PHDR-PROGRAM on a copy of INFILE.
That modifies the file in place. Then we move it to OUTFILE.
We only have this wrapper script because nacl_helper_bootstrap_munge_phdr.c
wants to modify a file in place (and it would be a much longer and more
fragile program if it created a fresh ELF output file instead).
"""
import shutil
import subprocess
import sys
def Main(argv):
if len(argv) != 4:
print 'Usage: %s MUNGE-PHDR-PROGRAM INFILE OUTFILE' % argv[0]
return 1
[prog, munger, infile, outfile] = argv
tmpfile = outfile + '.tmp'
shutil.copy(infile, tmpfile)
segment_num = '2'
subprocess.check_call([munger, tmpfile, segment_num])
shutil.move(tmpfile, outfile)
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# This takes three command-line arguments:
# MUNGE-PHDR-PROGRAM file name of program built from
# nacl_helper_bootstrap_munge_phdr.c
# INFILE raw linked ELF file name
# OUTFILE output file name
#
# We just run the MUNGE-PHDR-PROGRAM on a copy of INFILE.
# That modifies the file in place. Then we move it to OUTFILE.
#
# We only have this wrapper script because nacl_helper_bootstrap_munge_phdr.c
# wants to modify a file in place (and it would be a much longer and more
# fragile program if it created a fresh ELF output file instead).
import shutil
import subprocess
import sys
def Main(argv):
if len(argv) != 4:
print 'Usage: %s MUNGE-PHDR-PROGRAM INFILE OUTFILE' % argv[0]
sys.exit(1)
[prog, munger, infile, outfile] = argv
tmpfile = outfile + '.tmp'
shutil.copy(infile, tmpfile)
segment_num = '2'
subprocess.check_call([munger, tmpfile, segment_num])
shutil.move(tmpfile, outfile)
if __name__ == '__main__':
Main(sys.argv)
<commit_msg>Move over tweaks to munge-phdr script from chromium repo
Some cosmetic changes were made on the chromium side since we copied it.
Catch up to those, still preparing to remove the chromium copy ASAP.
BUG= none
TEST= trybots
R=bradchen@google.com
Review URL: http://codereview.chromium.org/8728008
git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@7312 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2<commit_after>#!/usr/bin/env python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This takes three command-line arguments:
MUNGE-PHDR-PROGRAM file name of program built from
nacl_helper_bootstrap_munge_phdr.c
INFILE raw linked ELF file name
OUTFILE output file name
We just run the MUNGE-PHDR-PROGRAM on a copy of INFILE.
That modifies the file in place. Then we move it to OUTFILE.
We only have this wrapper script because nacl_helper_bootstrap_munge_phdr.c
wants to modify a file in place (and it would be a much longer and more
fragile program if it created a fresh ELF output file instead).
"""
import shutil
import subprocess
import sys
def Main(argv):
if len(argv) != 4:
print 'Usage: %s MUNGE-PHDR-PROGRAM INFILE OUTFILE' % argv[0]
return 1
[prog, munger, infile, outfile] = argv
tmpfile = outfile + '.tmp'
shutil.copy(infile, tmpfile)
segment_num = '2'
subprocess.check_call([munger, tmpfile, segment_num])
shutil.move(tmpfile, outfile)
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
eb9d9196155e90c4949380c66ff8876f41bccc01
|
tomviz/python/tomviz/io/formats/numpy.py
|
tomviz/python/tomviz/io/formats/numpy.py
|
# -*- coding: utf-8 -*-
###############################################################################
# This source file is part of the Tomviz project, https://tomviz.org/.
# It is released under the 3-Clause BSD License, see "LICENSE".
###############################################################################
import numpy as np
from tomviz.io import FileType, IOBase, Reader, Writer
import tomviz.utils
from vtk import vtkImageData
class NumpyBase(IOBase):
@staticmethod
def file_type():
return FileType('NumPy binary format', ['npy'])
class NumpyWriter(Writer, NumpyBase):
def write(self, path, data_object):
data = tomviz.utils.get_array(data_object)
with open(path, "wb") as f:
np.save(f, data)
class NumpyReader(Reader, NumpyBase):
def read(self, path):
with open(path, "rb") as f:
data = np.load(f)
if len(data.shape) != 3:
return vtkImageData()
image_data = vtkImageData()
(x, y, z) = data.shape
image_data.SetOrigin(0, 0, 0)
image_data.SetSpacing(1, 1, 1)
image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
tomviz.utils.set_array(image_data, data)
return image_data
|
# -*- coding: utf-8 -*-
###############################################################################
# This source file is part of the Tomviz project, https://tomviz.org/.
# It is released under the 3-Clause BSD License, see "LICENSE".
###############################################################################
import numpy as np
from tomviz.io import FileType, IOBase, Reader, Writer
import tomviz.utils
from vtk import vtkImageData
class NumpyBase(IOBase):
@staticmethod
def file_type():
return FileType('NumPy binary format', ['npy'])
class NumpyWriter(Writer, NumpyBase):
def write(self, path, data_object):
data = tomviz.utils.get_array(data_object)
# Switch to row major order for NPY stores
data = data.reshape(data.shape[::-1])
with open(path, "wb") as f:
np.save(f, data)
class NumpyReader(Reader, NumpyBase):
def read(self, path):
with open(path, "rb") as f:
data = np.load(f)
if len(data.shape) != 3:
return vtkImageData()
# NPY stores data as row major order. VTK expects column major order.
data = data.reshape(data.shape[::-1])
image_data = vtkImageData()
(x, y, z) = data.shape
image_data.SetOrigin(0, 0, 0)
image_data.SetSpacing(1, 1, 1)
image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
tomviz.utils.set_array(image_data, data)
return image_data
|
Use C ordering for npy format
|
Use C ordering for npy format
This appears to save and and load files correctly, but when
loading, it prints this message:
Warning, array does not have Fortran order,
making deep copy and fixing...
...done.
I'm looking into this...
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com>
|
Python
|
bsd-3-clause
|
OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz
|
# -*- coding: utf-8 -*-
###############################################################################
# This source file is part of the Tomviz project, https://tomviz.org/.
# It is released under the 3-Clause BSD License, see "LICENSE".
###############################################################################
import numpy as np
from tomviz.io import FileType, IOBase, Reader, Writer
import tomviz.utils
from vtk import vtkImageData
class NumpyBase(IOBase):
@staticmethod
def file_type():
return FileType('NumPy binary format', ['npy'])
class NumpyWriter(Writer, NumpyBase):
def write(self, path, data_object):
data = tomviz.utils.get_array(data_object)
with open(path, "wb") as f:
np.save(f, data)
class NumpyReader(Reader, NumpyBase):
def read(self, path):
with open(path, "rb") as f:
data = np.load(f)
if len(data.shape) != 3:
return vtkImageData()
image_data = vtkImageData()
(x, y, z) = data.shape
image_data.SetOrigin(0, 0, 0)
image_data.SetSpacing(1, 1, 1)
image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
tomviz.utils.set_array(image_data, data)
return image_data
Use C ordering for npy format
This appears to save and and load files correctly, but when
loading, it prints this message:
Warning, array does not have Fortran order,
making deep copy and fixing...
...done.
I'm looking into this...
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com>
|
# -*- coding: utf-8 -*-
###############################################################################
# This source file is part of the Tomviz project, https://tomviz.org/.
# It is released under the 3-Clause BSD License, see "LICENSE".
###############################################################################
import numpy as np
from tomviz.io import FileType, IOBase, Reader, Writer
import tomviz.utils
from vtk import vtkImageData
class NumpyBase(IOBase):
@staticmethod
def file_type():
return FileType('NumPy binary format', ['npy'])
class NumpyWriter(Writer, NumpyBase):
def write(self, path, data_object):
data = tomviz.utils.get_array(data_object)
# Switch to row major order for NPY stores
data = data.reshape(data.shape[::-1])
with open(path, "wb") as f:
np.save(f, data)
class NumpyReader(Reader, NumpyBase):
def read(self, path):
with open(path, "rb") as f:
data = np.load(f)
if len(data.shape) != 3:
return vtkImageData()
# NPY stores data as row major order. VTK expects column major order.
data = data.reshape(data.shape[::-1])
image_data = vtkImageData()
(x, y, z) = data.shape
image_data.SetOrigin(0, 0, 0)
image_data.SetSpacing(1, 1, 1)
image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
tomviz.utils.set_array(image_data, data)
return image_data
|
<commit_before># -*- coding: utf-8 -*-
###############################################################################
# This source file is part of the Tomviz project, https://tomviz.org/.
# It is released under the 3-Clause BSD License, see "LICENSE".
###############################################################################
import numpy as np
from tomviz.io import FileType, IOBase, Reader, Writer
import tomviz.utils
from vtk import vtkImageData
class NumpyBase(IOBase):
@staticmethod
def file_type():
return FileType('NumPy binary format', ['npy'])
class NumpyWriter(Writer, NumpyBase):
def write(self, path, data_object):
data = tomviz.utils.get_array(data_object)
with open(path, "wb") as f:
np.save(f, data)
class NumpyReader(Reader, NumpyBase):
def read(self, path):
with open(path, "rb") as f:
data = np.load(f)
if len(data.shape) != 3:
return vtkImageData()
image_data = vtkImageData()
(x, y, z) = data.shape
image_data.SetOrigin(0, 0, 0)
image_data.SetSpacing(1, 1, 1)
image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
tomviz.utils.set_array(image_data, data)
return image_data
<commit_msg>Use C ordering for npy format
This appears to save and and load files correctly, but when
loading, it prints this message:
Warning, array does not have Fortran order,
making deep copy and fixing...
...done.
I'm looking into this...
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com><commit_after>
|
# -*- coding: utf-8 -*-
###############################################################################
# This source file is part of the Tomviz project, https://tomviz.org/.
# It is released under the 3-Clause BSD License, see "LICENSE".
###############################################################################
import numpy as np
from tomviz.io import FileType, IOBase, Reader, Writer
import tomviz.utils
from vtk import vtkImageData
class NumpyBase(IOBase):
@staticmethod
def file_type():
return FileType('NumPy binary format', ['npy'])
class NumpyWriter(Writer, NumpyBase):
def write(self, path, data_object):
data = tomviz.utils.get_array(data_object)
# Switch to row major order for NPY stores
data = data.reshape(data.shape[::-1])
with open(path, "wb") as f:
np.save(f, data)
class NumpyReader(Reader, NumpyBase):
def read(self, path):
with open(path, "rb") as f:
data = np.load(f)
if len(data.shape) != 3:
return vtkImageData()
# NPY stores data as row major order. VTK expects column major order.
data = data.reshape(data.shape[::-1])
image_data = vtkImageData()
(x, y, z) = data.shape
image_data.SetOrigin(0, 0, 0)
image_data.SetSpacing(1, 1, 1)
image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
tomviz.utils.set_array(image_data, data)
return image_data
|
# -*- coding: utf-8 -*-
###############################################################################
# This source file is part of the Tomviz project, https://tomviz.org/.
# It is released under the 3-Clause BSD License, see "LICENSE".
###############################################################################
import numpy as np
from tomviz.io import FileType, IOBase, Reader, Writer
import tomviz.utils
from vtk import vtkImageData
class NumpyBase(IOBase):
@staticmethod
def file_type():
return FileType('NumPy binary format', ['npy'])
class NumpyWriter(Writer, NumpyBase):
def write(self, path, data_object):
data = tomviz.utils.get_array(data_object)
with open(path, "wb") as f:
np.save(f, data)
class NumpyReader(Reader, NumpyBase):
def read(self, path):
with open(path, "rb") as f:
data = np.load(f)
if len(data.shape) != 3:
return vtkImageData()
image_data = vtkImageData()
(x, y, z) = data.shape
image_data.SetOrigin(0, 0, 0)
image_data.SetSpacing(1, 1, 1)
image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
tomviz.utils.set_array(image_data, data)
return image_data
Use C ordering for npy format
This appears to save and and load files correctly, but when
loading, it prints this message:
Warning, array does not have Fortran order,
making deep copy and fixing...
...done.
I'm looking into this...
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com># -*- coding: utf-8 -*-
###############################################################################
# This source file is part of the Tomviz project, https://tomviz.org/.
# It is released under the 3-Clause BSD License, see "LICENSE".
###############################################################################
import numpy as np
from tomviz.io import FileType, IOBase, Reader, Writer
import tomviz.utils
from vtk import vtkImageData
class NumpyBase(IOBase):
@staticmethod
def file_type():
return FileType('NumPy binary format', ['npy'])
class NumpyWriter(Writer, NumpyBase):
def write(self, path, data_object):
data = tomviz.utils.get_array(data_object)
# Switch to row major order for NPY stores
data = data.reshape(data.shape[::-1])
with open(path, "wb") as f:
np.save(f, data)
class NumpyReader(Reader, NumpyBase):
def read(self, path):
with open(path, "rb") as f:
data = np.load(f)
if len(data.shape) != 3:
return vtkImageData()
# NPY stores data as row major order. VTK expects column major order.
data = data.reshape(data.shape[::-1])
image_data = vtkImageData()
(x, y, z) = data.shape
image_data.SetOrigin(0, 0, 0)
image_data.SetSpacing(1, 1, 1)
image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
tomviz.utils.set_array(image_data, data)
return image_data
|
<commit_before># -*- coding: utf-8 -*-
###############################################################################
# This source file is part of the Tomviz project, https://tomviz.org/.
# It is released under the 3-Clause BSD License, see "LICENSE".
###############################################################################
import numpy as np
from tomviz.io import FileType, IOBase, Reader, Writer
import tomviz.utils
from vtk import vtkImageData
class NumpyBase(IOBase):
@staticmethod
def file_type():
return FileType('NumPy binary format', ['npy'])
class NumpyWriter(Writer, NumpyBase):
def write(self, path, data_object):
data = tomviz.utils.get_array(data_object)
with open(path, "wb") as f:
np.save(f, data)
class NumpyReader(Reader, NumpyBase):
def read(self, path):
with open(path, "rb") as f:
data = np.load(f)
if len(data.shape) != 3:
return vtkImageData()
image_data = vtkImageData()
(x, y, z) = data.shape
image_data.SetOrigin(0, 0, 0)
image_data.SetSpacing(1, 1, 1)
image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
tomviz.utils.set_array(image_data, data)
return image_data
<commit_msg>Use C ordering for npy format
This appears to save and and load files correctly, but when
loading, it prints this message:
Warning, array does not have Fortran order,
making deep copy and fixing...
...done.
I'm looking into this...
Signed-off-by: Patrick Avery <743342299f279e7a8c3ff5eb40671fce3e95f13a@kitware.com><commit_after># -*- coding: utf-8 -*-
###############################################################################
# This source file is part of the Tomviz project, https://tomviz.org/.
# It is released under the 3-Clause BSD License, see "LICENSE".
###############################################################################
import numpy as np
from tomviz.io import FileType, IOBase, Reader, Writer
import tomviz.utils
from vtk import vtkImageData
class NumpyBase(IOBase):
@staticmethod
def file_type():
return FileType('NumPy binary format', ['npy'])
class NumpyWriter(Writer, NumpyBase):
def write(self, path, data_object):
data = tomviz.utils.get_array(data_object)
# Switch to row major order for NPY stores
data = data.reshape(data.shape[::-1])
with open(path, "wb") as f:
np.save(f, data)
class NumpyReader(Reader, NumpyBase):
def read(self, path):
with open(path, "rb") as f:
data = np.load(f)
if len(data.shape) != 3:
return vtkImageData()
# NPY stores data as row major order. VTK expects column major order.
data = data.reshape(data.shape[::-1])
image_data = vtkImageData()
(x, y, z) = data.shape
image_data.SetOrigin(0, 0, 0)
image_data.SetSpacing(1, 1, 1)
image_data.SetExtent(0, x - 1, 0, y - 1, 0, z - 1)
tomviz.utils.set_array(image_data, data)
return image_data
|
4647526dd416a7e9e3b3d6b2b0b1876e86266743
|
django_hosts/tests/urls/simple.py
|
django_hosts/tests/urls/simple.py
|
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('django.views.generic.simple',
url(r'^simple/$', 'direct_to_template', name='simple-direct'),
)
|
from django.conf.urls.defaults import patterns, url
from django.views.generic import TemplateView
urlpatterns = patterns('django.views.generic.simple',
url(r'^simple/$', TemplateView.as_view(), name='simple-direct'),
)
|
Change direct_to_template view to TemplateView.as_view()
|
Change direct_to_template view to TemplateView.as_view()
Required for Django 1.5 compatibility during tests
|
Python
|
bsd-3-clause
|
jezdez/django-hosts
|
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('django.views.generic.simple',
url(r'^simple/$', 'direct_to_template', name='simple-direct'),
)
Change direct_to_template view to TemplateView.as_view()
Required for Django 1.5 compatibility during tests
|
from django.conf.urls.defaults import patterns, url
from django.views.generic import TemplateView
urlpatterns = patterns('django.views.generic.simple',
url(r'^simple/$', TemplateView.as_view(), name='simple-direct'),
)
|
<commit_before>from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('django.views.generic.simple',
url(r'^simple/$', 'direct_to_template', name='simple-direct'),
)
<commit_msg>Change direct_to_template view to TemplateView.as_view()
Required for Django 1.5 compatibility during tests<commit_after>
|
from django.conf.urls.defaults import patterns, url
from django.views.generic import TemplateView
urlpatterns = patterns('django.views.generic.simple',
url(r'^simple/$', TemplateView.as_view(), name='simple-direct'),
)
|
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('django.views.generic.simple',
url(r'^simple/$', 'direct_to_template', name='simple-direct'),
)
Change direct_to_template view to TemplateView.as_view()
Required for Django 1.5 compatibility during testsfrom django.conf.urls.defaults import patterns, url
from django.views.generic import TemplateView
urlpatterns = patterns('django.views.generic.simple',
url(r'^simple/$', TemplateView.as_view(), name='simple-direct'),
)
|
<commit_before>from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('django.views.generic.simple',
url(r'^simple/$', 'direct_to_template', name='simple-direct'),
)
<commit_msg>Change direct_to_template view to TemplateView.as_view()
Required for Django 1.5 compatibility during tests<commit_after>from django.conf.urls.defaults import patterns, url
from django.views.generic import TemplateView
urlpatterns = patterns('django.views.generic.simple',
url(r'^simple/$', TemplateView.as_view(), name='simple-direct'),
)
|
73c616cc9e3d5351e0f4e41d60ff03bd58b85967
|
scrapi/harvesters/scholarsbank.py
|
scrapi/harvesters/scholarsbank.py
|
"""
Harvester for Scholars Bank University of Oregon for the SHARE project
Example API call: http://scholarsbank.uoregon.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema
class ScholarsbankHarvester(OAIHarvester):
short_name = 'scholarsbank'
long_name = 'Scholars Bank University of Oregon'
url = 'http://scholarsbank.uoregon.edu'
timezone_granularity = True
base_url = 'http://scholarsbank.uoregon.edu/oai/request'
property_list = [
'type', 'source', 'format', 'relation',
'date', 'description', 'setSpec', 'identifier'
]
|
"""
Harvester for Scholars Bank University of Oregon for the SHARE project
Example API call: http://scholarsbank.uoregon.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.schemas import OAISCHEMA
from scrapi.base.helpers import updated_schema
def second_result(des):
return des[1] if len(des) > 1 else des[0] if des else ''
class ScholarsbankHarvester(OAIHarvester):
short_name = 'scholarsbank'
long_name = 'Scholars Bank University of Oregon'
url = 'http://scholarsbank.uoregon.edu'
timezone_granularity = True
base_url = 'http://scholarsbank.uoregon.edu/oai/request'
property_list = [
'type', 'source', 'format', 'relation',
'date', 'description', 'setSpec', 'identifier'
]
schema = updated_schema(OAISCHEMA, {
'description': ('//dc:description/node()', second_result)
})
|
Update schoalrsbank to grab second description if there are two
|
Update schoalrsbank to grab second description if there are two
|
Python
|
apache-2.0
|
fabianvf/scrapi,jeffreyliu3230/scrapi,erinspace/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,icereval/scrapi,alexgarciac/scrapi,CenterForOpenScience/scrapi,ostwald/scrapi,felliott/scrapi,mehanig/scrapi,erinspace/scrapi,mehanig/scrapi,fabianvf/scrapi
|
"""
Harvester for Scholars Bank University of Oregon for the SHARE project
Example API call: http://scholarsbank.uoregon.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema
class ScholarsbankHarvester(OAIHarvester):
short_name = 'scholarsbank'
long_name = 'Scholars Bank University of Oregon'
url = 'http://scholarsbank.uoregon.edu'
timezone_granularity = True
base_url = 'http://scholarsbank.uoregon.edu/oai/request'
property_list = [
'type', 'source', 'format', 'relation',
'date', 'description', 'setSpec', 'identifier'
]
Update schoalrsbank to grab second description if there are two
|
"""
Harvester for Scholars Bank University of Oregon for the SHARE project
Example API call: http://scholarsbank.uoregon.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.schemas import OAISCHEMA
from scrapi.base.helpers import updated_schema
def second_result(des):
return des[1] if len(des) > 1 else des[0] if des else ''
class ScholarsbankHarvester(OAIHarvester):
short_name = 'scholarsbank'
long_name = 'Scholars Bank University of Oregon'
url = 'http://scholarsbank.uoregon.edu'
timezone_granularity = True
base_url = 'http://scholarsbank.uoregon.edu/oai/request'
property_list = [
'type', 'source', 'format', 'relation',
'date', 'description', 'setSpec', 'identifier'
]
schema = updated_schema(OAISCHEMA, {
'description': ('//dc:description/node()', second_result)
})
|
<commit_before>"""
Harvester for Scholars Bank University of Oregon for the SHARE project
Example API call: http://scholarsbank.uoregon.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema
class ScholarsbankHarvester(OAIHarvester):
short_name = 'scholarsbank'
long_name = 'Scholars Bank University of Oregon'
url = 'http://scholarsbank.uoregon.edu'
timezone_granularity = True
base_url = 'http://scholarsbank.uoregon.edu/oai/request'
property_list = [
'type', 'source', 'format', 'relation',
'date', 'description', 'setSpec', 'identifier'
]
<commit_msg>Update schoalrsbank to grab second description if there are two<commit_after>
|
"""
Harvester for Scholars Bank University of Oregon for the SHARE project
Example API call: http://scholarsbank.uoregon.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.schemas import OAISCHEMA
from scrapi.base.helpers import updated_schema
def second_result(des):
return des[1] if len(des) > 1 else des[0] if des else ''
class ScholarsbankHarvester(OAIHarvester):
short_name = 'scholarsbank'
long_name = 'Scholars Bank University of Oregon'
url = 'http://scholarsbank.uoregon.edu'
timezone_granularity = True
base_url = 'http://scholarsbank.uoregon.edu/oai/request'
property_list = [
'type', 'source', 'format', 'relation',
'date', 'description', 'setSpec', 'identifier'
]
schema = updated_schema(OAISCHEMA, {
'description': ('//dc:description/node()', second_result)
})
|
"""
Harvester for Scholars Bank University of Oregon for the SHARE project
Example API call: http://scholarsbank.uoregon.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema
class ScholarsbankHarvester(OAIHarvester):
short_name = 'scholarsbank'
long_name = 'Scholars Bank University of Oregon'
url = 'http://scholarsbank.uoregon.edu'
timezone_granularity = True
base_url = 'http://scholarsbank.uoregon.edu/oai/request'
property_list = [
'type', 'source', 'format', 'relation',
'date', 'description', 'setSpec', 'identifier'
]
Update schoalrsbank to grab second description if there are two"""
Harvester for Scholars Bank University of Oregon for the SHARE project
Example API call: http://scholarsbank.uoregon.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.schemas import OAISCHEMA
from scrapi.base.helpers import updated_schema
def second_result(des):
return des[1] if len(des) > 1 else des[0] if des else ''
class ScholarsbankHarvester(OAIHarvester):
short_name = 'scholarsbank'
long_name = 'Scholars Bank University of Oregon'
url = 'http://scholarsbank.uoregon.edu'
timezone_granularity = True
base_url = 'http://scholarsbank.uoregon.edu/oai/request'
property_list = [
'type', 'source', 'format', 'relation',
'date', 'description', 'setSpec', 'identifier'
]
schema = updated_schema(OAISCHEMA, {
'description': ('//dc:description/node()', second_result)
})
|
<commit_before>"""
Harvester for Scholars Bank University of Oregon for the SHARE project
Example API call: http://scholarsbank.uoregon.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema
class ScholarsbankHarvester(OAIHarvester):
short_name = 'scholarsbank'
long_name = 'Scholars Bank University of Oregon'
url = 'http://scholarsbank.uoregon.edu'
timezone_granularity = True
base_url = 'http://scholarsbank.uoregon.edu/oai/request'
property_list = [
'type', 'source', 'format', 'relation',
'date', 'description', 'setSpec', 'identifier'
]
<commit_msg>Update schoalrsbank to grab second description if there are two<commit_after>"""
Harvester for Scholars Bank University of Oregon for the SHARE project
Example API call: http://scholarsbank.uoregon.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
"""
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.schemas import OAISCHEMA
from scrapi.base.helpers import updated_schema
def second_result(des):
return des[1] if len(des) > 1 else des[0] if des else ''
class ScholarsbankHarvester(OAIHarvester):
short_name = 'scholarsbank'
long_name = 'Scholars Bank University of Oregon'
url = 'http://scholarsbank.uoregon.edu'
timezone_granularity = True
base_url = 'http://scholarsbank.uoregon.edu/oai/request'
property_list = [
'type', 'source', 'format', 'relation',
'date', 'description', 'setSpec', 'identifier'
]
schema = updated_schema(OAISCHEMA, {
'description': ('//dc:description/node()', second_result)
})
|
150aa84158bab89e3700114038fab78504bed960
|
zou/app/blueprints/export/csv/persons.py
|
zou/app/blueprints/export/csv/persons.py
|
from zou.app.blueprints.export.csv.base import BaseCsvExport
from zou.app.models.person import Person
class PersonsCsvExport(BaseCsvExport):
def __init__(self):
BaseCsvExport.__init__(self, Person)
self.file_name = "people_export"
def build_headers(self):
return ["Last Name", "First Name", "Email", "Phone", "Role"]
def build_query(self):
return self.model.query.order_by(Person.last_name, Person.first_name)
def build_row(self, person):
return [
person.last_name,
person.first_name,
person.email,
person.phone,
person.role,
]
|
from zou.app.blueprints.export.csv.base import BaseCsvExport
from zou.app.models.person import Person
class PersonsCsvExport(BaseCsvExport):
def __init__(self):
BaseCsvExport.__init__(self, Person)
self.file_name = "people_export"
def build_headers(self):
return ["Last Name", "First Name", "Email", "Phone", "Role", "Active"]
def build_query(self):
return self.model.query.order_by(Person.last_name, Person.first_name)
def build_row(self, person):
active = "yes"
if not person.active:
active = "no"
return [
person.last_name,
person.first_name,
person.email,
person.phone,
person.role,
active
]
|
Add active column to person csv export
|
Add active column to person csv export
|
Python
|
agpl-3.0
|
cgwire/zou
|
from zou.app.blueprints.export.csv.base import BaseCsvExport
from zou.app.models.person import Person
class PersonsCsvExport(BaseCsvExport):
def __init__(self):
BaseCsvExport.__init__(self, Person)
self.file_name = "people_export"
def build_headers(self):
return ["Last Name", "First Name", "Email", "Phone", "Role"]
def build_query(self):
return self.model.query.order_by(Person.last_name, Person.first_name)
def build_row(self, person):
return [
person.last_name,
person.first_name,
person.email,
person.phone,
person.role,
]
Add active column to person csv export
|
from zou.app.blueprints.export.csv.base import BaseCsvExport
from zou.app.models.person import Person
class PersonsCsvExport(BaseCsvExport):
def __init__(self):
BaseCsvExport.__init__(self, Person)
self.file_name = "people_export"
def build_headers(self):
return ["Last Name", "First Name", "Email", "Phone", "Role", "Active"]
def build_query(self):
return self.model.query.order_by(Person.last_name, Person.first_name)
def build_row(self, person):
active = "yes"
if not person.active:
active = "no"
return [
person.last_name,
person.first_name,
person.email,
person.phone,
person.role,
active
]
|
<commit_before>from zou.app.blueprints.export.csv.base import BaseCsvExport
from zou.app.models.person import Person
class PersonsCsvExport(BaseCsvExport):
def __init__(self):
BaseCsvExport.__init__(self, Person)
self.file_name = "people_export"
def build_headers(self):
return ["Last Name", "First Name", "Email", "Phone", "Role"]
def build_query(self):
return self.model.query.order_by(Person.last_name, Person.first_name)
def build_row(self, person):
return [
person.last_name,
person.first_name,
person.email,
person.phone,
person.role,
]
<commit_msg>Add active column to person csv export<commit_after>
|
from zou.app.blueprints.export.csv.base import BaseCsvExport
from zou.app.models.person import Person
class PersonsCsvExport(BaseCsvExport):
def __init__(self):
BaseCsvExport.__init__(self, Person)
self.file_name = "people_export"
def build_headers(self):
return ["Last Name", "First Name", "Email", "Phone", "Role", "Active"]
def build_query(self):
return self.model.query.order_by(Person.last_name, Person.first_name)
def build_row(self, person):
active = "yes"
if not person.active:
active = "no"
return [
person.last_name,
person.first_name,
person.email,
person.phone,
person.role,
active
]
|
from zou.app.blueprints.export.csv.base import BaseCsvExport
from zou.app.models.person import Person
class PersonsCsvExport(BaseCsvExport):
def __init__(self):
BaseCsvExport.__init__(self, Person)
self.file_name = "people_export"
def build_headers(self):
return ["Last Name", "First Name", "Email", "Phone", "Role"]
def build_query(self):
return self.model.query.order_by(Person.last_name, Person.first_name)
def build_row(self, person):
return [
person.last_name,
person.first_name,
person.email,
person.phone,
person.role,
]
Add active column to person csv exportfrom zou.app.blueprints.export.csv.base import BaseCsvExport
from zou.app.models.person import Person
class PersonsCsvExport(BaseCsvExport):
def __init__(self):
BaseCsvExport.__init__(self, Person)
self.file_name = "people_export"
def build_headers(self):
return ["Last Name", "First Name", "Email", "Phone", "Role", "Active"]
def build_query(self):
return self.model.query.order_by(Person.last_name, Person.first_name)
def build_row(self, person):
active = "yes"
if not person.active:
active = "no"
return [
person.last_name,
person.first_name,
person.email,
person.phone,
person.role,
active
]
|
<commit_before>from zou.app.blueprints.export.csv.base import BaseCsvExport
from zou.app.models.person import Person
class PersonsCsvExport(BaseCsvExport):
def __init__(self):
BaseCsvExport.__init__(self, Person)
self.file_name = "people_export"
def build_headers(self):
return ["Last Name", "First Name", "Email", "Phone", "Role"]
def build_query(self):
return self.model.query.order_by(Person.last_name, Person.first_name)
def build_row(self, person):
return [
person.last_name,
person.first_name,
person.email,
person.phone,
person.role,
]
<commit_msg>Add active column to person csv export<commit_after>from zou.app.blueprints.export.csv.base import BaseCsvExport
from zou.app.models.person import Person
class PersonsCsvExport(BaseCsvExport):
def __init__(self):
BaseCsvExport.__init__(self, Person)
self.file_name = "people_export"
def build_headers(self):
return ["Last Name", "First Name", "Email", "Phone", "Role", "Active"]
def build_query(self):
return self.model.query.order_by(Person.last_name, Person.first_name)
def build_row(self, person):
active = "yes"
if not person.active:
active = "no"
return [
person.last_name,
person.first_name,
person.email,
person.phone,
person.role,
active
]
|
69d15ec69330828da7eb96591ca674b06c6f9017
|
fiware-region-sanity-tests/tests/regions/test_waterford.py
|
fiware-region-sanity-tests/tests/regions/test_waterford.py
|
# -*- coding: utf-8 -*-
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from tests import fiware_region_with_networks_tests, fiware_region_object_storage_tests
class TestSuite(fiware_region_with_networks_tests.FiwareRegionWithNetworkTest,
fiware_region_object_storage_tests.FiwareRegionsObjectStorageTests):
region_name = "Waterford"
|
# -*- coding: utf-8 -*-
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from tests import fiware_region_with_networks_tests
class TestSuite(fiware_region_with_networks_tests.FiwareRegionWithNetworkTest):
region_name = "Waterford"
|
Disable the Object Storage tests in Waterford
|
Disable the Object Storage tests in Waterford
|
Python
|
apache-2.0
|
Fiware/ops.Health,telefonicaid/fiware-health,Fiware/ops.Health,telefonicaid/fiware-health,telefonicaid/fiware-health,telefonicaid/fiware-health,Fiware/ops.Health,telefonicaid/fiware-health,Fiware/ops.Health,Fiware/ops.Health
|
# -*- coding: utf-8 -*-
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from tests import fiware_region_with_networks_tests, fiware_region_object_storage_tests
class TestSuite(fiware_region_with_networks_tests.FiwareRegionWithNetworkTest,
fiware_region_object_storage_tests.FiwareRegionsObjectStorageTests):
region_name = "Waterford"
Disable the Object Storage tests in Waterford
|
# -*- coding: utf-8 -*-
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from tests import fiware_region_with_networks_tests
class TestSuite(fiware_region_with_networks_tests.FiwareRegionWithNetworkTest):
region_name = "Waterford"
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from tests import fiware_region_with_networks_tests, fiware_region_object_storage_tests
class TestSuite(fiware_region_with_networks_tests.FiwareRegionWithNetworkTest,
fiware_region_object_storage_tests.FiwareRegionsObjectStorageTests):
region_name = "Waterford"
<commit_msg>Disable the Object Storage tests in Waterford<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from tests import fiware_region_with_networks_tests
class TestSuite(fiware_region_with_networks_tests.FiwareRegionWithNetworkTest):
region_name = "Waterford"
|
# -*- coding: utf-8 -*-
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from tests import fiware_region_with_networks_tests, fiware_region_object_storage_tests
class TestSuite(fiware_region_with_networks_tests.FiwareRegionWithNetworkTest,
fiware_region_object_storage_tests.FiwareRegionsObjectStorageTests):
region_name = "Waterford"
Disable the Object Storage tests in Waterford# -*- coding: utf-8 -*-
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from tests import fiware_region_with_networks_tests
class TestSuite(fiware_region_with_networks_tests.FiwareRegionWithNetworkTest):
region_name = "Waterford"
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from tests import fiware_region_with_networks_tests, fiware_region_object_storage_tests
class TestSuite(fiware_region_with_networks_tests.FiwareRegionWithNetworkTest,
fiware_region_object_storage_tests.FiwareRegionsObjectStorageTests):
region_name = "Waterford"
<commit_msg>Disable the Object Storage tests in Waterford<commit_after># -*- coding: utf-8 -*-
# Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U
#
# This file is part of FIWARE project.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
# For those usages not covered by the Apache version 2.0 License please
# contact with opensource@tid.es
__author__ = 'jfernandez'
from tests import fiware_region_with_networks_tests
class TestSuite(fiware_region_with_networks_tests.FiwareRegionWithNetworkTest):
region_name = "Waterford"
|
a11598e46ec882b74d5ce4c8694e1db86691a24f
|
accelerator/tests/factories/program_family_factory.py
|
accelerator/tests/factories/program_family_factory.py
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
# physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
|
Remove physical_address from factory, not sure why this helps
|
[AC-6976] Remove physical_address from factory, not sure why this helps
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
[AC-6976] Remove physical_address from factory, not sure why this helps
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
# physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
|
<commit_before># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
<commit_msg>[AC-6976] Remove physical_address from factory, not sure why this helps<commit_after>
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
# physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
[AC-6976] Remove physical_address from factory, not sure why this helps# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
# physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
|
<commit_before># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
<commit_msg>[AC-6976] Remove physical_address from factory, not sure why this helps<commit_after># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from __future__ import unicode_literals
import swapper
from factory import (
DjangoModelFactory,
Sequence,
)
from accelerator.apps import AcceleratorConfig
ProgramFamily = swapper.load_model(AcceleratorConfig.name, 'ProgramFamily')
class ProgramFamilyFactory(DjangoModelFactory):
class Meta:
model = ProgramFamily
name = Sequence(lambda n: "Program Family {0}".format(n))
short_description = 'A program family for testing'
url_slug = Sequence(lambda n: "pf{0}".format(n))
email_domain = Sequence(lambda n: "pf{0}.accelerator.org".format(n))
phone_number = "617-555-1212"
# physical_address = "Boston"
is_open_for_startups = True
is_open_for_experts = True
use_site_tree_side_nav = False
|
72f23c104a28fe4c91d5d36d3f939e110c6f16e3
|
exercises/chapter_04/exercise_04_01/exercise_04_01.py
|
exercises/chapter_04/exercise_04_01/exercise_04_01.py
|
# 4-1. Pizzas
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
|
# 4-1. Pizzas
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
print("I really like pizza!")
|
Add final version of exercise 4.1.
|
Add final version of exercise 4.1.
|
Python
|
mit
|
HenrikSamuelsson/python-crash-course
|
# 4-1. Pizzas
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
Add final version of exercise 4.1.
|
# 4-1. Pizzas
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
print("I really like pizza!")
|
<commit_before># 4-1. Pizzas
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
<commit_msg>Add final version of exercise 4.1.<commit_after>
|
# 4-1. Pizzas
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
print("I really like pizza!")
|
# 4-1. Pizzas
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
Add final version of exercise 4.1.# 4-1. Pizzas
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
print("I really like pizza!")
|
<commit_before># 4-1. Pizzas
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
<commit_msg>Add final version of exercise 4.1.<commit_after># 4-1. Pizzas
favorite_pizzas = ["Columpus", "Marco Polo", "Amerikana"]
for pizza in favorite_pizzas:
print("I like " + pizza + " pizza.")
print("I really like pizza!")
|
1af3cc43ae482549ee058e801b4f65e2af78653c
|
grow/testing/testdata/pod/extensions/preprocessors.py
|
grow/testing/testdata/pod/extensions/preprocessors.py
|
from grow import Preprocessor
from protorpc import messages
class CustomPreprocessor(Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
import grow
from protorpc import messages
class CustomPreprocessor(grow.Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
Update preprocessor testdata to use grow.Preprocessor.
|
Update preprocessor testdata to use grow.Preprocessor.
|
Python
|
mit
|
grow/pygrow,denmojo/pygrow,grow/grow,grow/grow,grow/pygrow,denmojo/pygrow,denmojo/pygrow,grow/grow,denmojo/pygrow,grow/grow,grow/pygrow
|
from grow import Preprocessor
from protorpc import messages
class CustomPreprocessor(Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
Update preprocessor testdata to use grow.Preprocessor.
|
import grow
from protorpc import messages
class CustomPreprocessor(grow.Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
<commit_before>from grow import Preprocessor
from protorpc import messages
class CustomPreprocessor(Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
<commit_msg>Update preprocessor testdata to use grow.Preprocessor.<commit_after>
|
import grow
from protorpc import messages
class CustomPreprocessor(grow.Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
from grow import Preprocessor
from protorpc import messages
class CustomPreprocessor(Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
Update preprocessor testdata to use grow.Preprocessor.import grow
from protorpc import messages
class CustomPreprocessor(grow.Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
<commit_before>from grow import Preprocessor
from protorpc import messages
class CustomPreprocessor(Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
<commit_msg>Update preprocessor testdata to use grow.Preprocessor.<commit_after>import grow
from protorpc import messages
class CustomPreprocessor(grow.Preprocessor):
KIND = 'custom_preprocessor'
class Config(messages.Message):
value = messages.StringField(1)
def run(self, **kwargs):
# To allow the test to check the result
self.pod._custom_preprocessor_value = self.config.value
|
0bb9c99d50662001237f0fc55228a9aa68fc211c
|
neutron/services/qos/constants.py
|
neutron/services/qos/constants.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib.services.qos import constants as qos_consts
# TODO(liuyulong): Because of the development sequence, the rule must
# be implemented in Neutron first. Then the following can be moved
# to neutron-lib after neutron has the new rule.
# Add qos rule packet rate limit
RULE_TYPE_PACKET_RATE_LIMIT = 'packet_rate_limit'
VALID_RULE_TYPES = qos_consts.VALID_RULE_TYPES + [RULE_TYPE_PACKET_RATE_LIMIT]
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib.services.qos import constants as qos_consts
# TODO(liuyulong): Because of the development sequence, the rule must
# be implemented in Neutron first. Then the following can be moved
# to neutron-lib after neutron has the new rule.
# Add qos rule packet rate limit
RULE_TYPE_PACKET_RATE_LIMIT = 'packet_rate_limit'
# NOTE(przszc): Ensure that there are no duplicates in the list. Order of the
# items in the list must be stable, as QosRuleType OVO hash value depends on
# it.
# TODO(przszc): When a rule type is moved to neutron-lib, it can be removed
# from the list below.
VALID_RULE_TYPES = (qos_consts.VALID_RULE_TYPES +
([RULE_TYPE_PACKET_RATE_LIMIT] if RULE_TYPE_PACKET_RATE_LIMIT not in
qos_consts.VALID_RULE_TYPES else [])
)
|
Fix gate for neutron-lib v2.14
|
Fix gate for neutron-lib v2.14
A patch [1] that updates upper-constraints.txt for neutron-lib 2.14 fails
to pass check pipeline.
Due to development sequence it's necessary to append a new rule type
to VALID_RULE_TYPES in Neutron first, and then move it to neutron-lib.
Because of this process, there's a risk that the new rule type is
present in both Neutron and in neutron-lib.
This is why check pipeline keeps failing with neutron-lib v2.14, as it
contains 'packet_rate_limit' and in Neutron we append it to the list
anyways, ending up with duplicates.
This patch ensures that there are no duplicates in VALID_RULE_TYPES.
[1] https://review.opendev.org/c/openstack/requirements/+/805352
Change-Id: Ib6963f402c9fec8169afcf467d613bba4e06130d
|
Python
|
apache-2.0
|
openstack/neutron,openstack/neutron,mahak/neutron,openstack/neutron,mahak/neutron,mahak/neutron
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib.services.qos import constants as qos_consts
# TODO(liuyulong): Because of the development sequence, the rule must
# be implemented in Neutron first. Then the following can be moved
# to neutron-lib after neutron has the new rule.
# Add qos rule packet rate limit
RULE_TYPE_PACKET_RATE_LIMIT = 'packet_rate_limit'
VALID_RULE_TYPES = qos_consts.VALID_RULE_TYPES + [RULE_TYPE_PACKET_RATE_LIMIT]
Fix gate for neutron-lib v2.14
A patch [1] that updates upper-constraints.txt for neutron-lib 2.14 fails
to pass check pipeline.
Due to development sequence it's necessary to append a new rule type
to VALID_RULE_TYPES in Neutron first, and then move it to neutron-lib.
Because of this process, there's a risk that the new rule type is
present in both Neutron and in neutron-lib.
This is why check pipeline keeps failing with neutron-lib v2.14, as it
contains 'packet_rate_limit' and in Neutron we append it to the list
anyways, ending up with duplicates.
This patch ensures that there are no duplicates in VALID_RULE_TYPES.
[1] https://review.opendev.org/c/openstack/requirements/+/805352
Change-Id: Ib6963f402c9fec8169afcf467d613bba4e06130d
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib.services.qos import constants as qos_consts
# TODO(liuyulong): Because of the development sequence, the rule must
# be implemented in Neutron first. Then the following can be moved
# to neutron-lib after neutron has the new rule.
# Add qos rule packet rate limit
RULE_TYPE_PACKET_RATE_LIMIT = 'packet_rate_limit'
# NOTE(przszc): Ensure that there are no duplicates in the list. Order of the
# items in the list must be stable, as QosRuleType OVO hash value depends on
# it.
# TODO(przszc): When a rule type is moved to neutron-lib, it can be removed
# from the list below.
VALID_RULE_TYPES = (qos_consts.VALID_RULE_TYPES +
([RULE_TYPE_PACKET_RATE_LIMIT] if RULE_TYPE_PACKET_RATE_LIMIT not in
qos_consts.VALID_RULE_TYPES else [])
)
|
<commit_before>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib.services.qos import constants as qos_consts
# TODO(liuyulong): Because of the development sequence, the rule must
# be implemented in Neutron first. Then the following can be moved
# to neutron-lib after neutron has the new rule.
# Add qos rule packet rate limit
RULE_TYPE_PACKET_RATE_LIMIT = 'packet_rate_limit'
VALID_RULE_TYPES = qos_consts.VALID_RULE_TYPES + [RULE_TYPE_PACKET_RATE_LIMIT]
<commit_msg>Fix gate for neutron-lib v2.14
A patch [1] that updates upper-constraints.txt for neutron-lib 2.14 fails
to pass check pipeline.
Due to development sequence it's necessary to append a new rule type
to VALID_RULE_TYPES in Neutron first, and then move it to neutron-lib.
Because of this process, there's a risk that the new rule type is
present in both Neutron and in neutron-lib.
This is why check pipeline keeps failing with neutron-lib v2.14, as it
contains 'packet_rate_limit' and in Neutron we append it to the list
anyways, ending up with duplicates.
This patch ensures that there are no duplicates in VALID_RULE_TYPES.
[1] https://review.opendev.org/c/openstack/requirements/+/805352
Change-Id: Ib6963f402c9fec8169afcf467d613bba4e06130d<commit_after>
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib.services.qos import constants as qos_consts
# TODO(liuyulong): Because of the development sequence, the rule must
# be implemented in Neutron first. Then the following can be moved
# to neutron-lib after neutron has the new rule.
# Add qos rule packet rate limit
RULE_TYPE_PACKET_RATE_LIMIT = 'packet_rate_limit'
# NOTE(przszc): Ensure that there are no duplicates in the list. Order of the
# items in the list must be stable, as QosRuleType OVO hash value depends on
# it.
# TODO(przszc): When a rule type is moved to neutron-lib, it can be removed
# from the list below.
VALID_RULE_TYPES = (qos_consts.VALID_RULE_TYPES +
([RULE_TYPE_PACKET_RATE_LIMIT] if RULE_TYPE_PACKET_RATE_LIMIT not in
qos_consts.VALID_RULE_TYPES else [])
)
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib.services.qos import constants as qos_consts
# TODO(liuyulong): Because of the development sequence, the rule must
# be implemented in Neutron first. Then the following can be moved
# to neutron-lib after neutron has the new rule.
# Add qos rule packet rate limit
RULE_TYPE_PACKET_RATE_LIMIT = 'packet_rate_limit'
VALID_RULE_TYPES = qos_consts.VALID_RULE_TYPES + [RULE_TYPE_PACKET_RATE_LIMIT]
Fix gate for neutron-lib v2.14
A patch [1] that updates upper-constraints.txt for neutron-lib 2.14 fails
to pass check pipeline.
Due to development sequence it's necessary to append a new rule type
to VALID_RULE_TYPES in Neutron first, and then move it to neutron-lib.
Because of this process, there's a risk that the new rule type is
present in both Neutron and in neutron-lib.
This is why check pipeline keeps failing with neutron-lib v2.14, as it
contains 'packet_rate_limit' and in Neutron we append it to the list
anyways, ending up with duplicates.
This patch ensures that there are no duplicates in VALID_RULE_TYPES.
[1] https://review.opendev.org/c/openstack/requirements/+/805352
Change-Id: Ib6963f402c9fec8169afcf467d613bba4e06130d#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib.services.qos import constants as qos_consts
# TODO(liuyulong): Because of the development sequence, the rule must
# be implemented in Neutron first. Then the following can be moved
# to neutron-lib after neutron has the new rule.
# Add qos rule packet rate limit
RULE_TYPE_PACKET_RATE_LIMIT = 'packet_rate_limit'
# NOTE(przszc): Ensure that there are no duplicates in the list. Order of the
# items in the list must be stable, as QosRuleType OVO hash value depends on
# it.
# TODO(przszc): When a rule type is moved to neutron-lib, it can be removed
# from the list below.
VALID_RULE_TYPES = (qos_consts.VALID_RULE_TYPES +
([RULE_TYPE_PACKET_RATE_LIMIT] if RULE_TYPE_PACKET_RATE_LIMIT not in
qos_consts.VALID_RULE_TYPES else [])
)
|
<commit_before>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib.services.qos import constants as qos_consts
# TODO(liuyulong): Because of the development sequence, the rule must
# be implemented in Neutron first. Then the following can be moved
# to neutron-lib after neutron has the new rule.
# Add qos rule packet rate limit
RULE_TYPE_PACKET_RATE_LIMIT = 'packet_rate_limit'
VALID_RULE_TYPES = qos_consts.VALID_RULE_TYPES + [RULE_TYPE_PACKET_RATE_LIMIT]
<commit_msg>Fix gate for neutron-lib v2.14
A patch [1] that updates upper-constraints.txt for neutron-lib 2.14 fails
to pass check pipeline.
Due to development sequence it's necessary to append a new rule type
to VALID_RULE_TYPES in Neutron first, and then move it to neutron-lib.
Because of this process, there's a risk that the new rule type is
present in both Neutron and in neutron-lib.
This is why check pipeline keeps failing with neutron-lib v2.14, as it
contains 'packet_rate_limit' and in Neutron we append it to the list
anyways, ending up with duplicates.
This patch ensures that there are no duplicates in VALID_RULE_TYPES.
[1] https://review.opendev.org/c/openstack/requirements/+/805352
Change-Id: Ib6963f402c9fec8169afcf467d613bba4e06130d<commit_after>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron_lib.services.qos import constants as qos_consts
# TODO(liuyulong): Because of the development sequence, the rule must
# be implemented in Neutron first. Then the following can be moved
# to neutron-lib after neutron has the new rule.
# Add qos rule packet rate limit
RULE_TYPE_PACKET_RATE_LIMIT = 'packet_rate_limit'
# NOTE(przszc): Ensure that there are no duplicates in the list. Order of the
# items in the list must be stable, as QosRuleType OVO hash value depends on
# it.
# TODO(przszc): When a rule type is moved to neutron-lib, it can be removed
# from the list below.
VALID_RULE_TYPES = (qos_consts.VALID_RULE_TYPES +
([RULE_TYPE_PACKET_RATE_LIMIT] if RULE_TYPE_PACKET_RATE_LIMIT not in
qos_consts.VALID_RULE_TYPES else [])
)
|
666622968cddc5f9f62e044da8a4f1b779c1532b
|
gn/find_msvc.py
|
gn/find_msvc.py
|
#!/usr/bin/env python
# Copyright 2019 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
'''
Look for the first match in the format
C:\\Program Files (x86)\\Microsoft Visual Studio\\${RELEASE}\\${VERSION}\\VC
'''
def find_msvc():
if sys.platform.startswith('win'):
default_dir = r'C:\Program Files (x86)\Microsoft Visual Studio'
for release in ['2019', '2017']:
for version in ['Enterprise', 'Professional', 'Community', 'BuildTools']:
path = os.path.join(default_dir, release, version, 'VC')
if os.path.isdir(path):
return path
# Fall back to vswhere.exe to determine non-standard installation paths
# Fixed location, https://github.com/Microsoft/vswhere/wiki/Installing
vswhere = os.path.join(os.getenv('ProgramFiles(x86)'),
'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
command = (vswhere + ' -prerelease -legacy -products * -sort -utf8 '
'-property installationPath')
paths = subprocess.check_output(command).decode('utf-8').splitlines()
if paths:
return paths[0] + '\\VC'
return None
if __name__ == '__main__':
result = find_msvc()
if result:
sys.stdout.write(result + '\n')
|
#!/usr/bin/env python
# Copyright 2019 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
'''
Look for the first match in the format
C:\\Program Files (x86)\\Microsoft Visual Studio\\${RELEASE}\\${VERSION}\\VC
'''
def find_msvc():
if sys.platform.startswith('win'):
default_dir = r'C:\Program Files (x86)\Microsoft Visual Studio'
for release in ['2019', '2017']:
for version in ['Enterprise', 'Professional', 'Community', 'BuildTools', 'Preview']:
path = os.path.join(default_dir, release, version, 'VC')
if os.path.isdir(path):
return path
# Fall back to vswhere.exe to determine non-standard installation paths
# Fixed location, https://github.com/Microsoft/vswhere/wiki/Installing
vswhere = os.path.join(os.getenv('ProgramFiles(x86)'),
'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
command = (vswhere + ' -prerelease -legacy -products * -sort -utf8 '
'-property installationPath')
paths = subprocess.check_output(command).decode('utf-8').splitlines()
if paths:
return paths[0] + '\\VC'
return None
if __name__ == '__main__':
result = find_msvc()
if result:
sys.stdout.write(result + '\n')
|
Add Preview to list of possible MSVC versions.
|
Add Preview to list of possible MSVC versions.
Needed to test a Preview version of MSVC and adding it to the list here
makes it a bit easier and the list more complete.
Change-Id: I419636722303816f0cd961408229fcef0773e8e0
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/286496
Reviewed-by: Mike Klein <14574f09dfa9b4e14759b88c3426a495a0e627b0@google.com>
Commit-Queue: Ben Wagner <53965154ffeecc4a10886eab9259e95965f2979a@google.com>
|
Python
|
bsd-3-clause
|
google/skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia
|
#!/usr/bin/env python
# Copyright 2019 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
'''
Look for the first match in the format
C:\\Program Files (x86)\\Microsoft Visual Studio\\${RELEASE}\\${VERSION}\\VC
'''
def find_msvc():
if sys.platform.startswith('win'):
default_dir = r'C:\Program Files (x86)\Microsoft Visual Studio'
for release in ['2019', '2017']:
for version in ['Enterprise', 'Professional', 'Community', 'BuildTools']:
path = os.path.join(default_dir, release, version, 'VC')
if os.path.isdir(path):
return path
# Fall back to vswhere.exe to determine non-standard installation paths
# Fixed location, https://github.com/Microsoft/vswhere/wiki/Installing
vswhere = os.path.join(os.getenv('ProgramFiles(x86)'),
'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
command = (vswhere + ' -prerelease -legacy -products * -sort -utf8 '
'-property installationPath')
paths = subprocess.check_output(command).decode('utf-8').splitlines()
if paths:
return paths[0] + '\\VC'
return None
if __name__ == '__main__':
result = find_msvc()
if result:
sys.stdout.write(result + '\n')
Add Preview to list of possible MSVC versions.
Needed to test a Preview version of MSVC and adding it to the list here
makes it a bit easier and the list more complete.
Change-Id: I419636722303816f0cd961408229fcef0773e8e0
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/286496
Reviewed-by: Mike Klein <14574f09dfa9b4e14759b88c3426a495a0e627b0@google.com>
Commit-Queue: Ben Wagner <53965154ffeecc4a10886eab9259e95965f2979a@google.com>
|
#!/usr/bin/env python
# Copyright 2019 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
'''
Look for the first match in the format
C:\\Program Files (x86)\\Microsoft Visual Studio\\${RELEASE}\\${VERSION}\\VC
'''
def find_msvc():
if sys.platform.startswith('win'):
default_dir = r'C:\Program Files (x86)\Microsoft Visual Studio'
for release in ['2019', '2017']:
for version in ['Enterprise', 'Professional', 'Community', 'BuildTools', 'Preview']:
path = os.path.join(default_dir, release, version, 'VC')
if os.path.isdir(path):
return path
# Fall back to vswhere.exe to determine non-standard installation paths
# Fixed location, https://github.com/Microsoft/vswhere/wiki/Installing
vswhere = os.path.join(os.getenv('ProgramFiles(x86)'),
'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
command = (vswhere + ' -prerelease -legacy -products * -sort -utf8 '
'-property installationPath')
paths = subprocess.check_output(command).decode('utf-8').splitlines()
if paths:
return paths[0] + '\\VC'
return None
if __name__ == '__main__':
result = find_msvc()
if result:
sys.stdout.write(result + '\n')
|
<commit_before>#!/usr/bin/env python
# Copyright 2019 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
'''
Look for the first match in the format
C:\\Program Files (x86)\\Microsoft Visual Studio\\${RELEASE}\\${VERSION}\\VC
'''
def find_msvc():
if sys.platform.startswith('win'):
default_dir = r'C:\Program Files (x86)\Microsoft Visual Studio'
for release in ['2019', '2017']:
for version in ['Enterprise', 'Professional', 'Community', 'BuildTools']:
path = os.path.join(default_dir, release, version, 'VC')
if os.path.isdir(path):
return path
# Fall back to vswhere.exe to determine non-standard installation paths
# Fixed location, https://github.com/Microsoft/vswhere/wiki/Installing
vswhere = os.path.join(os.getenv('ProgramFiles(x86)'),
'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
command = (vswhere + ' -prerelease -legacy -products * -sort -utf8 '
'-property installationPath')
paths = subprocess.check_output(command).decode('utf-8').splitlines()
if paths:
return paths[0] + '\\VC'
return None
if __name__ == '__main__':
result = find_msvc()
if result:
sys.stdout.write(result + '\n')
<commit_msg>Add Preview to list of possible MSVC versions.
Needed to test a Preview version of MSVC and adding it to the list here
makes it a bit easier and the list more complete.
Change-Id: I419636722303816f0cd961408229fcef0773e8e0
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/286496
Reviewed-by: Mike Klein <14574f09dfa9b4e14759b88c3426a495a0e627b0@google.com>
Commit-Queue: Ben Wagner <53965154ffeecc4a10886eab9259e95965f2979a@google.com><commit_after>
|
#!/usr/bin/env python
# Copyright 2019 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
'''
Look for the first match in the format
C:\\Program Files (x86)\\Microsoft Visual Studio\\${RELEASE}\\${VERSION}\\VC
'''
def find_msvc():
if sys.platform.startswith('win'):
default_dir = r'C:\Program Files (x86)\Microsoft Visual Studio'
for release in ['2019', '2017']:
for version in ['Enterprise', 'Professional', 'Community', 'BuildTools', 'Preview']:
path = os.path.join(default_dir, release, version, 'VC')
if os.path.isdir(path):
return path
# Fall back to vswhere.exe to determine non-standard installation paths
# Fixed location, https://github.com/Microsoft/vswhere/wiki/Installing
vswhere = os.path.join(os.getenv('ProgramFiles(x86)'),
'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
command = (vswhere + ' -prerelease -legacy -products * -sort -utf8 '
'-property installationPath')
paths = subprocess.check_output(command).decode('utf-8').splitlines()
if paths:
return paths[0] + '\\VC'
return None
if __name__ == '__main__':
result = find_msvc()
if result:
sys.stdout.write(result + '\n')
|
#!/usr/bin/env python
# Copyright 2019 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
'''
Look for the first match in the format
C:\\Program Files (x86)\\Microsoft Visual Studio\\${RELEASE}\\${VERSION}\\VC
'''
def find_msvc():
if sys.platform.startswith('win'):
default_dir = r'C:\Program Files (x86)\Microsoft Visual Studio'
for release in ['2019', '2017']:
for version in ['Enterprise', 'Professional', 'Community', 'BuildTools']:
path = os.path.join(default_dir, release, version, 'VC')
if os.path.isdir(path):
return path
# Fall back to vswhere.exe to determine non-standard installation paths
# Fixed location, https://github.com/Microsoft/vswhere/wiki/Installing
vswhere = os.path.join(os.getenv('ProgramFiles(x86)'),
'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
command = (vswhere + ' -prerelease -legacy -products * -sort -utf8 '
'-property installationPath')
paths = subprocess.check_output(command).decode('utf-8').splitlines()
if paths:
return paths[0] + '\\VC'
return None
if __name__ == '__main__':
result = find_msvc()
if result:
sys.stdout.write(result + '\n')
Add Preview to list of possible MSVC versions.
Needed to test a Preview version of MSVC and adding it to the list here
makes it a bit easier and the list more complete.
Change-Id: I419636722303816f0cd961408229fcef0773e8e0
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/286496
Reviewed-by: Mike Klein <14574f09dfa9b4e14759b88c3426a495a0e627b0@google.com>
Commit-Queue: Ben Wagner <53965154ffeecc4a10886eab9259e95965f2979a@google.com>#!/usr/bin/env python
# Copyright 2019 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
'''
Look for the first match in the format
C:\\Program Files (x86)\\Microsoft Visual Studio\\${RELEASE}\\${VERSION}\\VC
'''
def find_msvc():
if sys.platform.startswith('win'):
default_dir = r'C:\Program Files (x86)\Microsoft Visual Studio'
for release in ['2019', '2017']:
for version in ['Enterprise', 'Professional', 'Community', 'BuildTools', 'Preview']:
path = os.path.join(default_dir, release, version, 'VC')
if os.path.isdir(path):
return path
# Fall back to vswhere.exe to determine non-standard installation paths
# Fixed location, https://github.com/Microsoft/vswhere/wiki/Installing
vswhere = os.path.join(os.getenv('ProgramFiles(x86)'),
'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
command = (vswhere + ' -prerelease -legacy -products * -sort -utf8 '
'-property installationPath')
paths = subprocess.check_output(command).decode('utf-8').splitlines()
if paths:
return paths[0] + '\\VC'
return None
if __name__ == '__main__':
result = find_msvc()
if result:
sys.stdout.write(result + '\n')
|
<commit_before>#!/usr/bin/env python
# Copyright 2019 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
'''
Look for the first match in the format
C:\\Program Files (x86)\\Microsoft Visual Studio\\${RELEASE}\\${VERSION}\\VC
'''
def find_msvc():
if sys.platform.startswith('win'):
default_dir = r'C:\Program Files (x86)\Microsoft Visual Studio'
for release in ['2019', '2017']:
for version in ['Enterprise', 'Professional', 'Community', 'BuildTools']:
path = os.path.join(default_dir, release, version, 'VC')
if os.path.isdir(path):
return path
# Fall back to vswhere.exe to determine non-standard installation paths
# Fixed location, https://github.com/Microsoft/vswhere/wiki/Installing
vswhere = os.path.join(os.getenv('ProgramFiles(x86)'),
'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
command = (vswhere + ' -prerelease -legacy -products * -sort -utf8 '
'-property installationPath')
paths = subprocess.check_output(command).decode('utf-8').splitlines()
if paths:
return paths[0] + '\\VC'
return None
if __name__ == '__main__':
result = find_msvc()
if result:
sys.stdout.write(result + '\n')
<commit_msg>Add Preview to list of possible MSVC versions.
Needed to test a Preview version of MSVC and adding it to the list here
makes it a bit easier and the list more complete.
Change-Id: I419636722303816f0cd961408229fcef0773e8e0
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/286496
Reviewed-by: Mike Klein <14574f09dfa9b4e14759b88c3426a495a0e627b0@google.com>
Commit-Queue: Ben Wagner <53965154ffeecc4a10886eab9259e95965f2979a@google.com><commit_after>#!/usr/bin/env python
# Copyright 2019 Google Inc.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import subprocess
'''
Look for the first match in the format
C:\\Program Files (x86)\\Microsoft Visual Studio\\${RELEASE}\\${VERSION}\\VC
'''
def find_msvc():
if sys.platform.startswith('win'):
default_dir = r'C:\Program Files (x86)\Microsoft Visual Studio'
for release in ['2019', '2017']:
for version in ['Enterprise', 'Professional', 'Community', 'BuildTools', 'Preview']:
path = os.path.join(default_dir, release, version, 'VC')
if os.path.isdir(path):
return path
# Fall back to vswhere.exe to determine non-standard installation paths
# Fixed location, https://github.com/Microsoft/vswhere/wiki/Installing
vswhere = os.path.join(os.getenv('ProgramFiles(x86)'),
'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
command = (vswhere + ' -prerelease -legacy -products * -sort -utf8 '
'-property installationPath')
paths = subprocess.check_output(command).decode('utf-8').splitlines()
if paths:
return paths[0] + '\\VC'
return None
if __name__ == '__main__':
result = find_msvc()
if result:
sys.stdout.write(result + '\n')
|
7f8455c9687e8c7750fe1cfcbfdf4fd720888012
|
iis/__init__.py
|
iis/__init__.py
|
import logging.config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
from flask_bootstrap import Bootstrap
def create_app(config: object) -> Flask:
"""Create the flask app. Can be called from testing contexts"""
app = Flask(__name__)
app.config.from_envvar('IIS_FLASK_SETTINGS')
app.config.from_object(config)
# Call app.logger to prevent it from clobbering configuration
app.logger
logging.config.dictConfig(app.config['LOGGING'])
app.logger.info("App configured.")
return app
app = create_app(None)
# Set up SQLAlchemy and Migrate
db = SQLAlchemy(app) # type: SQLAlchemy
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Set up bootstrap
Bootstrap(app)
# Configure user model for Flask-User
from iis.models import User # noqa: E402
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from iis import views, models # noqa: E402, F401
|
import logging.config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
from flask_bootstrap import Bootstrap
import iis.jobs
def create_app(config: object) -> Flask:
"""Create the flask app. Can be called from testing contexts"""
app = Flask(__name__)
app.config.from_envvar("IIS_FLASK_SETTINGS")
app.config.from_object(config)
# Register blueprints
app.register_blueprint(iis.jobs.jobs, url_prefix="/jobs")
# Call app.logger to prevent it from clobbering configuration
app.logger
logging.config.dictConfig(app.config["LOGGING"])
app.logger.info("App configured.")
return app
app = create_app(None)
# Set up SQLAlchemy and Migrate
db = SQLAlchemy(app) # type: SQLAlchemy
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Set up bootstrap
Bootstrap(app)
# Configure user model for Flask-User
from iis.models import User # noqa: E402
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from iis import views, models # noqa: E402, F401
|
Use '"' for string delimeter
|
Use '"' for string delimeter
|
Python
|
agpl-3.0
|
interactomix/iis,interactomix/iis
|
import logging.config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
from flask_bootstrap import Bootstrap
def create_app(config: object) -> Flask:
"""Create the flask app. Can be called from testing contexts"""
app = Flask(__name__)
app.config.from_envvar('IIS_FLASK_SETTINGS')
app.config.from_object(config)
# Call app.logger to prevent it from clobbering configuration
app.logger
logging.config.dictConfig(app.config['LOGGING'])
app.logger.info("App configured.")
return app
app = create_app(None)
# Set up SQLAlchemy and Migrate
db = SQLAlchemy(app) # type: SQLAlchemy
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Set up bootstrap
Bootstrap(app)
# Configure user model for Flask-User
from iis.models import User # noqa: E402
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from iis import views, models # noqa: E402, F401
Use '"' for string delimeter
|
import logging.config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
from flask_bootstrap import Bootstrap
import iis.jobs
def create_app(config: object) -> Flask:
"""Create the flask app. Can be called from testing contexts"""
app = Flask(__name__)
app.config.from_envvar("IIS_FLASK_SETTINGS")
app.config.from_object(config)
# Register blueprints
app.register_blueprint(iis.jobs.jobs, url_prefix="/jobs")
# Call app.logger to prevent it from clobbering configuration
app.logger
logging.config.dictConfig(app.config["LOGGING"])
app.logger.info("App configured.")
return app
app = create_app(None)
# Set up SQLAlchemy and Migrate
db = SQLAlchemy(app) # type: SQLAlchemy
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Set up bootstrap
Bootstrap(app)
# Configure user model for Flask-User
from iis.models import User # noqa: E402
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from iis import views, models # noqa: E402, F401
|
<commit_before>import logging.config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
from flask_bootstrap import Bootstrap
def create_app(config: object) -> Flask:
"""Create the flask app. Can be called from testing contexts"""
app = Flask(__name__)
app.config.from_envvar('IIS_FLASK_SETTINGS')
app.config.from_object(config)
# Call app.logger to prevent it from clobbering configuration
app.logger
logging.config.dictConfig(app.config['LOGGING'])
app.logger.info("App configured.")
return app
app = create_app(None)
# Set up SQLAlchemy and Migrate
db = SQLAlchemy(app) # type: SQLAlchemy
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Set up bootstrap
Bootstrap(app)
# Configure user model for Flask-User
from iis.models import User # noqa: E402
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from iis import views, models # noqa: E402, F401
<commit_msg>Use '"' for string delimeter<commit_after>
|
import logging.config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
from flask_bootstrap import Bootstrap
import iis.jobs
def create_app(config: object) -> Flask:
"""Create the flask app. Can be called from testing contexts"""
app = Flask(__name__)
app.config.from_envvar("IIS_FLASK_SETTINGS")
app.config.from_object(config)
# Register blueprints
app.register_blueprint(iis.jobs.jobs, url_prefix="/jobs")
# Call app.logger to prevent it from clobbering configuration
app.logger
logging.config.dictConfig(app.config["LOGGING"])
app.logger.info("App configured.")
return app
app = create_app(None)
# Set up SQLAlchemy and Migrate
db = SQLAlchemy(app) # type: SQLAlchemy
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Set up bootstrap
Bootstrap(app)
# Configure user model for Flask-User
from iis.models import User # noqa: E402
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from iis import views, models # noqa: E402, F401
|
import logging.config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
from flask_bootstrap import Bootstrap
def create_app(config: object) -> Flask:
"""Create the flask app. Can be called from testing contexts"""
app = Flask(__name__)
app.config.from_envvar('IIS_FLASK_SETTINGS')
app.config.from_object(config)
# Call app.logger to prevent it from clobbering configuration
app.logger
logging.config.dictConfig(app.config['LOGGING'])
app.logger.info("App configured.")
return app
app = create_app(None)
# Set up SQLAlchemy and Migrate
db = SQLAlchemy(app) # type: SQLAlchemy
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Set up bootstrap
Bootstrap(app)
# Configure user model for Flask-User
from iis.models import User # noqa: E402
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from iis import views, models # noqa: E402, F401
Use '"' for string delimeterimport logging.config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
from flask_bootstrap import Bootstrap
import iis.jobs
def create_app(config: object) -> Flask:
"""Create the flask app. Can be called from testing contexts"""
app = Flask(__name__)
app.config.from_envvar("IIS_FLASK_SETTINGS")
app.config.from_object(config)
# Register blueprints
app.register_blueprint(iis.jobs.jobs, url_prefix="/jobs")
# Call app.logger to prevent it from clobbering configuration
app.logger
logging.config.dictConfig(app.config["LOGGING"])
app.logger.info("App configured.")
return app
app = create_app(None)
# Set up SQLAlchemy and Migrate
db = SQLAlchemy(app) # type: SQLAlchemy
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Set up bootstrap
Bootstrap(app)
# Configure user model for Flask-User
from iis.models import User # noqa: E402
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from iis import views, models # noqa: E402, F401
|
<commit_before>import logging.config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
from flask_bootstrap import Bootstrap
def create_app(config: object) -> Flask:
"""Create the flask app. Can be called from testing contexts"""
app = Flask(__name__)
app.config.from_envvar('IIS_FLASK_SETTINGS')
app.config.from_object(config)
# Call app.logger to prevent it from clobbering configuration
app.logger
logging.config.dictConfig(app.config['LOGGING'])
app.logger.info("App configured.")
return app
app = create_app(None)
# Set up SQLAlchemy and Migrate
db = SQLAlchemy(app) # type: SQLAlchemy
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Set up bootstrap
Bootstrap(app)
# Configure user model for Flask-User
from iis.models import User # noqa: E402
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from iis import views, models # noqa: E402, F401
<commit_msg>Use '"' for string delimeter<commit_after>import logging.config
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from flask_user import UserManager, SQLAlchemyAdapter
from flask_bootstrap import Bootstrap
import iis.jobs
def create_app(config: object) -> Flask:
"""Create the flask app. Can be called from testing contexts"""
app = Flask(__name__)
app.config.from_envvar("IIS_FLASK_SETTINGS")
app.config.from_object(config)
# Register blueprints
app.register_blueprint(iis.jobs.jobs, url_prefix="/jobs")
# Call app.logger to prevent it from clobbering configuration
app.logger
logging.config.dictConfig(app.config["LOGGING"])
app.logger.info("App configured.")
return app
app = create_app(None)
# Set up SQLAlchemy and Migrate
db = SQLAlchemy(app) # type: SQLAlchemy
migrate = Migrate(app, db)
# Load Flask-Mail
mail = Mail(app)
# Set up bootstrap
Bootstrap(app)
# Configure user model for Flask-User
from iis.models import User # noqa: E402
db_adapter = SQLAlchemyAdapter(db, User)
user_manager = UserManager(db_adapter, app)
from iis import views, models # noqa: E402, F401
|
7594a1dc0573576e92cfbe8bdcaeab4d5732b3c3
|
jpa/eclipselink.jpa.test/resource/weblogic/wls_start.py
|
jpa/eclipselink.jpa.test/resource/weblogic/wls_start.py
|
############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
|
############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
|
Change start-weblogic-server using jdk as default.
|
Change start-weblogic-server using jdk as default.
Code reviewed by Edwin Tang
Former-commit-id: 6e2cbb2da770d73e12dfae7d36fd6f1ef00c4ed7
|
Python
|
epl-1.0
|
bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs,bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs,bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs
|
############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
Change start-weblogic-server using jdk as default.
Code reviewed by Edwin Tang
Former-commit-id: 6e2cbb2da770d73e12dfae7d36fd6f1ef00c4ed7
|
############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
|
<commit_before>############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
<commit_msg>Change start-weblogic-server using jdk as default.
Code reviewed by Edwin Tang
Former-commit-id: 6e2cbb2da770d73e12dfae7d36fd6f1ef00c4ed7<commit_after>
|
############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
|
############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
Change start-weblogic-server using jdk as default.
Code reviewed by Edwin Tang
Former-commit-id: 6e2cbb2da770d73e12dfae7d36fd6f1ef00c4ed7############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
|
<commit_before>############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
<commit_msg>Change start-weblogic-server using jdk as default.
Code reviewed by Edwin Tang
Former-commit-id: 6e2cbb2da770d73e12dfae7d36fd6f1ef00c4ed7<commit_after>############################################################################
# Generic script applicable on any Operating Environments (Unix, Windows)
# ScriptName : wls_start.py
# Properties : weblogic.properties
# Author : Kevin Yuan
############################################################################
#===========================================================================
# Start server using wlst command
#===========================================================================
#startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-Xms256m -Xmx960m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Using the following instead of above "jvmarg" setting when using SUN jdk
# because jrockit doesn't support PermSize when the server run on SUN jdk
#===========================================================================
startServer('%%TARGET_SERVER%%', 'eclipselink', url='t3://%%WL_HOST%%:%%WL_PORT%%', username='%%WL_USR%%', password='%%WL_PWD%%', domainDir='%%WL_DOMAIN%%', jvmArgs='-XX:PermSize=128m -XX:MaxPermSize=256m -Dweblogic.Stdout=stdout.log -Dweblogic.Stderr=stderr.log')
#===========================================================================
# Add the following jvmarg(s) into wlst command when you try to debug
#===========================================================================
#-Xdebug
#-Xnoagent
#-Xrunjdwp:transport=dt_socket,server=y,suspend=n,address=4000
|
bbfdbc4b5b6a35105a65910a878be85040cf5263
|
VMEncryption/main/oscrypto/encryptstates/OSEncryptionState.py
|
VMEncryption/main/oscrypto/encryptstates/OSEncryptionState.py
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from collections import namedtuple
class OSEncryptionState(object):
def __init__(self, context):
super(OSEncryptionState, self).__init__()
self.state_executed = False
def enter(self):
assert 0, "implement enter"
def should_exit(self):
assert 0, "implement should_exit"
OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
['hutil',
'distro_patcher',
'logger',
'encryption_environment'])
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from collections import namedtuple
class OSEncryptionState(object):
def __init__(self, context):
super(OSEncryptionState, self).__init__()
def enter(self):
assert 0, "implement enter"
def should_exit(self):
assert 0, "implement should_exit"
OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
['hutil',
'distro_patcher',
'logger',
'encryption_environment'])
|
Remove var declaration from abstract base class
|
Remove var declaration from abstract base class
|
Python
|
apache-2.0
|
soumyanishan/azure-linux-extensions,bpramod/azure-linux-extensions,Azure/azure-linux-extensions,Azure/azure-linux-extensions,vityagi/azure-linux-extensions,Azure/azure-linux-extensions,Azure/azure-linux-extensions,andyliuliming/azure-linux-extensions,andyliuliming/azure-linux-extensions,jasonzio/azure-linux-extensions,krkhan/azure-linux-extensions,varunkumta/azure-linux-extensions,Azure/azure-linux-extensions,jasonzio/azure-linux-extensions,krkhan/azure-linux-extensions,bpramod/azure-linux-extensions,krkhan/azure-linux-extensions,jasonzio/azure-linux-extensions,soumyanishan/azure-linux-extensions,bpramod/azure-linux-extensions,vityagi/azure-linux-extensions,Azure/azure-linux-extensions,soumyanishan/azure-linux-extensions,soumyanishan/azure-linux-extensions,varunkumta/azure-linux-extensions,varunkumta/azure-linux-extensions,bpramod/azure-linux-extensions,vityagi/azure-linux-extensions,varunkumta/azure-linux-extensions,andyliuliming/azure-linux-extensions,andyliuliming/azure-linux-extensions,bpramod/azure-linux-extensions,bpramod/azure-linux-extensions,vityagi/azure-linux-extensions,vityagi/azure-linux-extensions,krkhan/azure-linux-extensions,vityagi/azure-linux-extensions,jasonzio/azure-linux-extensions,vityagi/azure-linux-extensions,Azure/azure-linux-extensions,bpramod/azure-linux-extensions,soumyanishan/azure-linux-extensions
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from collections import namedtuple
class OSEncryptionState(object):
def __init__(self, context):
super(OSEncryptionState, self).__init__()
self.state_executed = False
def enter(self):
assert 0, "implement enter"
def should_exit(self):
assert 0, "implement should_exit"
OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
['hutil',
'distro_patcher',
'logger',
'encryption_environment'])
Remove var declaration from abstract base class
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from collections import namedtuple
class OSEncryptionState(object):
def __init__(self, context):
super(OSEncryptionState, self).__init__()
def enter(self):
assert 0, "implement enter"
def should_exit(self):
assert 0, "implement should_exit"
OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
['hutil',
'distro_patcher',
'logger',
'encryption_environment'])
|
<commit_before>#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from collections import namedtuple
class OSEncryptionState(object):
def __init__(self, context):
super(OSEncryptionState, self).__init__()
self.state_executed = False
def enter(self):
assert 0, "implement enter"
def should_exit(self):
assert 0, "implement should_exit"
OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
['hutil',
'distro_patcher',
'logger',
'encryption_environment'])
<commit_msg>Remove var declaration from abstract base class<commit_after>
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from collections import namedtuple
class OSEncryptionState(object):
def __init__(self, context):
super(OSEncryptionState, self).__init__()
def enter(self):
assert 0, "implement enter"
def should_exit(self):
assert 0, "implement should_exit"
OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
['hutil',
'distro_patcher',
'logger',
'encryption_environment'])
|
#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from collections import namedtuple
class OSEncryptionState(object):
def __init__(self, context):
super(OSEncryptionState, self).__init__()
self.state_executed = False
def enter(self):
assert 0, "implement enter"
def should_exit(self):
assert 0, "implement should_exit"
OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
['hutil',
'distro_patcher',
'logger',
'encryption_environment'])
Remove var declaration from abstract base class#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from collections import namedtuple
class OSEncryptionState(object):
def __init__(self, context):
super(OSEncryptionState, self).__init__()
def enter(self):
assert 0, "implement enter"
def should_exit(self):
assert 0, "implement should_exit"
OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
['hutil',
'distro_patcher',
'logger',
'encryption_environment'])
|
<commit_before>#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from collections import namedtuple
class OSEncryptionState(object):
def __init__(self, context):
super(OSEncryptionState, self).__init__()
self.state_executed = False
def enter(self):
assert 0, "implement enter"
def should_exit(self):
assert 0, "implement should_exit"
OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
['hutil',
'distro_patcher',
'logger',
'encryption_environment'])
<commit_msg>Remove var declaration from abstract base class<commit_after>#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from collections import namedtuple
class OSEncryptionState(object):
def __init__(self, context):
super(OSEncryptionState, self).__init__()
def enter(self):
assert 0, "implement enter"
def should_exit(self):
assert 0, "implement should_exit"
OSEncryptionStateContext = namedtuple('OSEncryptionStateContext',
['hutil',
'distro_patcher',
'logger',
'encryption_environment'])
|
a248ac96a04cccc31f881496e45db3212ad46118
|
core/components/security/factor.py
|
core/components/security/factor.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data['tokenResponse'], [facet])
except AttributeError:
return user, False
return user, True
|
Fix server error when login with u2f
|
Fix server error when login with u2f
|
Python
|
mit
|
chiaki64/Windless,chiaki64/Windless
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
Fix server error when login with u2f
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data['tokenResponse'], [facet])
except AttributeError:
return user, False
return user, True
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
<commit_msg>Fix server error when login with u2f<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data['tokenResponse'], [facet])
except AttributeError:
return user, False
return user, True
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
Fix server error when login with u2f#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data['tokenResponse'], [facet])
except AttributeError:
return user, False
return user, True
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data, [facet])
except AttributeError:
return user, False
return user, True
<commit_msg>Fix server error when login with u2f<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
from u2flib_server.u2f import (begin_registration,
begin_authentication,
complete_registration,
complete_authentication)
from components.eternity import config
facet = config.rss['link']
async def enroll(user):
enroll = begin_registration(facet, user.get('_u2f_devices_', []))
user['_u2f_enroll_'] = enroll.json
return user, json.dumps(enroll.data_for_client)
async def bind(user, data):
response = data['tokenResponse']
enroll = user.pop('_u2f_enroll_')
device, cert = complete_registration(enroll, response, [facet])
patch = device
patch['deviceName'] = data['deviceName']
patch['registerDate'] = data['date']
user.setdefault('_u2f_devices_', []).append(json.dumps(patch))
# cert = x509.load_der_x509_certificate(cert, default_backend())
return user, True
async def sign(user):
challenge = begin_authentication(facet, user.get('_u2f_devices_', []))
user['_u2f_challenge_'] = challenge.json
return user, json.dumps(challenge.data_for_client)
async def verify(user, data):
challenge = user.pop('_u2f_challenge_')
try:
complete_authentication(challenge, data['tokenResponse'], [facet])
except AttributeError:
return user, False
return user, True
|
cc27883cd84794f29d9fddab174bb41fc305cdb7
|
test_fallback.py
|
test_fallback.py
|
import platform
import sys
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
|
import platform
import sys
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
platform.python_implementation = lambda:'CPython'
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
|
Test both branches of pylibsodium_salsa choice
|
Test both branches of pylibsodium_salsa choice
|
Python
|
isc
|
jvarho/pylibscrypt,jvarho/pylibscrypt
|
import platform
import sys
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
Test both branches of pylibsodium_salsa choice
|
import platform
import sys
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
platform.python_implementation = lambda:'CPython'
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
|
<commit_before>
import platform
import sys
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
<commit_msg>Test both branches of pylibsodium_salsa choice<commit_after>
|
import platform
import sys
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
platform.python_implementation = lambda:'CPython'
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
|
import platform
import sys
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
Test both branches of pylibsodium_salsa choice
import platform
import sys
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
platform.python_implementation = lambda:'CPython'
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
|
<commit_before>
import platform
import sys
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
<commit_msg>Test both branches of pylibsodium_salsa choice<commit_after>
import platform
import sys
platform.python_implementation = lambda:'PyPy'
def unimport():
del sys.modules['pylibscrypt']
sys.modules.pop('pylibscrypt.common', None)
sys.modules.pop('pylibscrypt.mcf', None)
sys.modules['pylibscrypt.pylibscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pyscrypt'] = None
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium'] = None
import pylibscrypt
unimport()
platform.python_implementation = lambda:'CPython'
import pylibscrypt
unimport()
sys.modules['pylibscrypt.pylibsodium_salsa'] = None
import pylibscrypt
|
94a981693b6a89aa2035aae5ed039a5108196151
|
test_utils/crawler/plugins/time_plugin.py
|
test_utils/crawler/plugins/time_plugin.py
|
import time
import logging
from base import Plugin
LOG = logging.getLogger('crawler')
class Time(Plugin):
"""
Follow the time it takes to run requests.
"""
def __init__(self):
super(Time, self).__init__()
self.timed_urls = self.data['timed_urls'] = {}
def pre_request(self, sender, **kwargs):
url = kwargs['url']
self.timed_urls[url] = time.time()
def post_request(self, sender, **kwargs):
cur = time.time()
url = kwargs['url']
old_time = self.timed_urls[url]
total_time = cur - old_time
self.timed_urls[url] = total_time
LOG.debug("Time taken: %s", self.timed_urls[url])
def finish_run(self, sender, **kwargs):
"Print the longest time it took for pages to load"
alist = sorted(self.timed_urls.iteritems(), key=lambda (k,v): (v,k), reverse=True)
for url, ttime in alist[:10]:
LOG.info("%s took %f", url, ttime)
PLUGIN = Time
|
import time
import logging
import csv
import os
from base import Plugin
LOG = logging.getLogger('crawler')
class Time(Plugin):
"""
Follow the time it takes to run requests.
"""
csv_writer = None
def __init__(self):
super(Time, self).__init__()
self.timed_urls = self.data['timed_urls'] = {}
def set_output_dir(self, output_dir=None):
super(Time, self).set_output_dir(output_dir)
if output_dir:
self.csv_writer = csv.writer(open(os.path.join(output_dir, 'url_times.csv'), 'w'))
def pre_request(self, sender, **kwargs):
url = kwargs['url']
self.timed_urls[url] = time.time()
def post_request(self, sender, **kwargs):
cur = time.time()
url = kwargs['url']
old_time = self.timed_urls[url]
total_time = cur - old_time
self.timed_urls[url] = total_time
LOG.debug("Time taken: %s", self.timed_urls[url])
if self.csv_writer:
self.csv_writer.writerow((url, self.timed_urls[url]))
def finish_run(self, sender, **kwargs):
"Print the longest time it took for pages to load"
alist = sorted(self.timed_urls.iteritems(), key=lambda (k,v): (v,k), reverse=True)
for url, ttime in alist[:10]:
LOG.info("%s took %f", url, ttime)
PLUGIN = Time
|
Write out request times to a csv of output_dir is specified
|
Write out request times to a csv of output_dir is specified
|
Python
|
mit
|
ericholscher/django-test-utils,ericholscher/django-test-utils
|
import time
import logging
from base import Plugin
LOG = logging.getLogger('crawler')
class Time(Plugin):
"""
Follow the time it takes to run requests.
"""
def __init__(self):
super(Time, self).__init__()
self.timed_urls = self.data['timed_urls'] = {}
def pre_request(self, sender, **kwargs):
url = kwargs['url']
self.timed_urls[url] = time.time()
def post_request(self, sender, **kwargs):
cur = time.time()
url = kwargs['url']
old_time = self.timed_urls[url]
total_time = cur - old_time
self.timed_urls[url] = total_time
LOG.debug("Time taken: %s", self.timed_urls[url])
def finish_run(self, sender, **kwargs):
"Print the longest time it took for pages to load"
alist = sorted(self.timed_urls.iteritems(), key=lambda (k,v): (v,k), reverse=True)
for url, ttime in alist[:10]:
LOG.info("%s took %f", url, ttime)
PLUGIN = TimeWrite out request times to a csv of output_dir is specified
|
import time
import logging
import csv
import os
from base import Plugin
LOG = logging.getLogger('crawler')
class Time(Plugin):
"""
Follow the time it takes to run requests.
"""
csv_writer = None
def __init__(self):
super(Time, self).__init__()
self.timed_urls = self.data['timed_urls'] = {}
def set_output_dir(self, output_dir=None):
super(Time, self).set_output_dir(output_dir)
if output_dir:
self.csv_writer = csv.writer(open(os.path.join(output_dir, 'url_times.csv'), 'w'))
def pre_request(self, sender, **kwargs):
url = kwargs['url']
self.timed_urls[url] = time.time()
def post_request(self, sender, **kwargs):
cur = time.time()
url = kwargs['url']
old_time = self.timed_urls[url]
total_time = cur - old_time
self.timed_urls[url] = total_time
LOG.debug("Time taken: %s", self.timed_urls[url])
if self.csv_writer:
self.csv_writer.writerow((url, self.timed_urls[url]))
def finish_run(self, sender, **kwargs):
"Print the longest time it took for pages to load"
alist = sorted(self.timed_urls.iteritems(), key=lambda (k,v): (v,k), reverse=True)
for url, ttime in alist[:10]:
LOG.info("%s took %f", url, ttime)
PLUGIN = Time
|
<commit_before>import time
import logging
from base import Plugin
LOG = logging.getLogger('crawler')
class Time(Plugin):
"""
Follow the time it takes to run requests.
"""
def __init__(self):
super(Time, self).__init__()
self.timed_urls = self.data['timed_urls'] = {}
def pre_request(self, sender, **kwargs):
url = kwargs['url']
self.timed_urls[url] = time.time()
def post_request(self, sender, **kwargs):
cur = time.time()
url = kwargs['url']
old_time = self.timed_urls[url]
total_time = cur - old_time
self.timed_urls[url] = total_time
LOG.debug("Time taken: %s", self.timed_urls[url])
def finish_run(self, sender, **kwargs):
"Print the longest time it took for pages to load"
alist = sorted(self.timed_urls.iteritems(), key=lambda (k,v): (v,k), reverse=True)
for url, ttime in alist[:10]:
LOG.info("%s took %f", url, ttime)
PLUGIN = Time<commit_msg>Write out request times to a csv of output_dir is specified<commit_after>
|
import time
import logging
import csv
import os
from base import Plugin
LOG = logging.getLogger('crawler')
class Time(Plugin):
"""
Follow the time it takes to run requests.
"""
csv_writer = None
def __init__(self):
super(Time, self).__init__()
self.timed_urls = self.data['timed_urls'] = {}
def set_output_dir(self, output_dir=None):
super(Time, self).set_output_dir(output_dir)
if output_dir:
self.csv_writer = csv.writer(open(os.path.join(output_dir, 'url_times.csv'), 'w'))
def pre_request(self, sender, **kwargs):
url = kwargs['url']
self.timed_urls[url] = time.time()
def post_request(self, sender, **kwargs):
cur = time.time()
url = kwargs['url']
old_time = self.timed_urls[url]
total_time = cur - old_time
self.timed_urls[url] = total_time
LOG.debug("Time taken: %s", self.timed_urls[url])
if self.csv_writer:
self.csv_writer.writerow((url, self.timed_urls[url]))
def finish_run(self, sender, **kwargs):
"Print the longest time it took for pages to load"
alist = sorted(self.timed_urls.iteritems(), key=lambda (k,v): (v,k), reverse=True)
for url, ttime in alist[:10]:
LOG.info("%s took %f", url, ttime)
PLUGIN = Time
|
import time
import logging
from base import Plugin
LOG = logging.getLogger('crawler')
class Time(Plugin):
"""
Follow the time it takes to run requests.
"""
def __init__(self):
super(Time, self).__init__()
self.timed_urls = self.data['timed_urls'] = {}
def pre_request(self, sender, **kwargs):
url = kwargs['url']
self.timed_urls[url] = time.time()
def post_request(self, sender, **kwargs):
cur = time.time()
url = kwargs['url']
old_time = self.timed_urls[url]
total_time = cur - old_time
self.timed_urls[url] = total_time
LOG.debug("Time taken: %s", self.timed_urls[url])
def finish_run(self, sender, **kwargs):
"Print the longest time it took for pages to load"
alist = sorted(self.timed_urls.iteritems(), key=lambda (k,v): (v,k), reverse=True)
for url, ttime in alist[:10]:
LOG.info("%s took %f", url, ttime)
PLUGIN = TimeWrite out request times to a csv of output_dir is specifiedimport time
import logging
import csv
import os
from base import Plugin
LOG = logging.getLogger('crawler')
class Time(Plugin):
"""
Follow the time it takes to run requests.
"""
csv_writer = None
def __init__(self):
super(Time, self).__init__()
self.timed_urls = self.data['timed_urls'] = {}
def set_output_dir(self, output_dir=None):
super(Time, self).set_output_dir(output_dir)
if output_dir:
self.csv_writer = csv.writer(open(os.path.join(output_dir, 'url_times.csv'), 'w'))
def pre_request(self, sender, **kwargs):
url = kwargs['url']
self.timed_urls[url] = time.time()
def post_request(self, sender, **kwargs):
cur = time.time()
url = kwargs['url']
old_time = self.timed_urls[url]
total_time = cur - old_time
self.timed_urls[url] = total_time
LOG.debug("Time taken: %s", self.timed_urls[url])
if self.csv_writer:
self.csv_writer.writerow((url, self.timed_urls[url]))
def finish_run(self, sender, **kwargs):
"Print the longest time it took for pages to load"
alist = sorted(self.timed_urls.iteritems(), key=lambda (k,v): (v,k), reverse=True)
for url, ttime in alist[:10]:
LOG.info("%s took %f", url, ttime)
PLUGIN = Time
|
<commit_before>import time
import logging
from base import Plugin
LOG = logging.getLogger('crawler')
class Time(Plugin):
"""
Follow the time it takes to run requests.
"""
def __init__(self):
super(Time, self).__init__()
self.timed_urls = self.data['timed_urls'] = {}
def pre_request(self, sender, **kwargs):
url = kwargs['url']
self.timed_urls[url] = time.time()
def post_request(self, sender, **kwargs):
cur = time.time()
url = kwargs['url']
old_time = self.timed_urls[url]
total_time = cur - old_time
self.timed_urls[url] = total_time
LOG.debug("Time taken: %s", self.timed_urls[url])
def finish_run(self, sender, **kwargs):
"Print the longest time it took for pages to load"
alist = sorted(self.timed_urls.iteritems(), key=lambda (k,v): (v,k), reverse=True)
for url, ttime in alist[:10]:
LOG.info("%s took %f", url, ttime)
PLUGIN = Time<commit_msg>Write out request times to a csv of output_dir is specified<commit_after>import time
import logging
import csv
import os
from base import Plugin
LOG = logging.getLogger('crawler')
class Time(Plugin):
"""
Follow the time it takes to run requests.
"""
csv_writer = None
def __init__(self):
super(Time, self).__init__()
self.timed_urls = self.data['timed_urls'] = {}
def set_output_dir(self, output_dir=None):
super(Time, self).set_output_dir(output_dir)
if output_dir:
self.csv_writer = csv.writer(open(os.path.join(output_dir, 'url_times.csv'), 'w'))
def pre_request(self, sender, **kwargs):
url = kwargs['url']
self.timed_urls[url] = time.time()
def post_request(self, sender, **kwargs):
cur = time.time()
url = kwargs['url']
old_time = self.timed_urls[url]
total_time = cur - old_time
self.timed_urls[url] = total_time
LOG.debug("Time taken: %s", self.timed_urls[url])
if self.csv_writer:
self.csv_writer.writerow((url, self.timed_urls[url]))
def finish_run(self, sender, **kwargs):
"Print the longest time it took for pages to load"
alist = sorted(self.timed_urls.iteritems(), key=lambda (k,v): (v,k), reverse=True)
for url, ttime in alist[:10]:
LOG.info("%s took %f", url, ttime)
PLUGIN = Time
|
480525b10dcac543a34a09b00051bd3dca1609f0
|
src/ggrc/migrations/versions/20151204135707_504f541411a5_comment_assignee_type.py
|
src/ggrc/migrations/versions/20151204135707_504f541411a5_comment_assignee_type.py
|
"""Comment assignee type
Revision ID: 504f541411a5
Revises: 18cbdd3a7fd9
Create Date: 2015-12-04 13:57:07.047217
"""
# revision identifiers, used by Alembic.
revision = '504f541411a5'
down_revision = '18cbdd3a7fd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
"comments",
sa.Column("assignee_type", sa.Text(), nullable=True)
)
def downgrade():
op.drop_column("comments", "assignee_type")
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: ivan@reciprocitylabs.com
# Maintained By: ivan@reciprocitylabs.com
"""Comment assignee type
Revision ID: 504f541411a5
Revises: 18cbdd3a7fd9
Create Date: 2015-12-04 13:57:07.047217
"""
# revision identifiers, used by Alembic.
revision = '504f541411a5'
down_revision = '18cbdd3a7fd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
"comments",
sa.Column("assignee_type", sa.Text(), nullable=True)
)
def downgrade():
op.drop_column("comments", "assignee_type")
|
Add licence header to migration
|
Add licence header to migration
|
Python
|
apache-2.0
|
jmakov/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core
|
"""Comment assignee type
Revision ID: 504f541411a5
Revises: 18cbdd3a7fd9
Create Date: 2015-12-04 13:57:07.047217
"""
# revision identifiers, used by Alembic.
revision = '504f541411a5'
down_revision = '18cbdd3a7fd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
"comments",
sa.Column("assignee_type", sa.Text(), nullable=True)
)
def downgrade():
op.drop_column("comments", "assignee_type")
Add licence header to migration
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: ivan@reciprocitylabs.com
# Maintained By: ivan@reciprocitylabs.com
"""Comment assignee type
Revision ID: 504f541411a5
Revises: 18cbdd3a7fd9
Create Date: 2015-12-04 13:57:07.047217
"""
# revision identifiers, used by Alembic.
revision = '504f541411a5'
down_revision = '18cbdd3a7fd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
"comments",
sa.Column("assignee_type", sa.Text(), nullable=True)
)
def downgrade():
op.drop_column("comments", "assignee_type")
|
<commit_before>
"""Comment assignee type
Revision ID: 504f541411a5
Revises: 18cbdd3a7fd9
Create Date: 2015-12-04 13:57:07.047217
"""
# revision identifiers, used by Alembic.
revision = '504f541411a5'
down_revision = '18cbdd3a7fd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
"comments",
sa.Column("assignee_type", sa.Text(), nullable=True)
)
def downgrade():
op.drop_column("comments", "assignee_type")
<commit_msg>Add licence header to migration<commit_after>
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: ivan@reciprocitylabs.com
# Maintained By: ivan@reciprocitylabs.com
"""Comment assignee type
Revision ID: 504f541411a5
Revises: 18cbdd3a7fd9
Create Date: 2015-12-04 13:57:07.047217
"""
# revision identifiers, used by Alembic.
revision = '504f541411a5'
down_revision = '18cbdd3a7fd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
"comments",
sa.Column("assignee_type", sa.Text(), nullable=True)
)
def downgrade():
op.drop_column("comments", "assignee_type")
|
"""Comment assignee type
Revision ID: 504f541411a5
Revises: 18cbdd3a7fd9
Create Date: 2015-12-04 13:57:07.047217
"""
# revision identifiers, used by Alembic.
revision = '504f541411a5'
down_revision = '18cbdd3a7fd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
"comments",
sa.Column("assignee_type", sa.Text(), nullable=True)
)
def downgrade():
op.drop_column("comments", "assignee_type")
Add licence header to migration# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: ivan@reciprocitylabs.com
# Maintained By: ivan@reciprocitylabs.com
"""Comment assignee type
Revision ID: 504f541411a5
Revises: 18cbdd3a7fd9
Create Date: 2015-12-04 13:57:07.047217
"""
# revision identifiers, used by Alembic.
revision = '504f541411a5'
down_revision = '18cbdd3a7fd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
"comments",
sa.Column("assignee_type", sa.Text(), nullable=True)
)
def downgrade():
op.drop_column("comments", "assignee_type")
|
<commit_before>
"""Comment assignee type
Revision ID: 504f541411a5
Revises: 18cbdd3a7fd9
Create Date: 2015-12-04 13:57:07.047217
"""
# revision identifiers, used by Alembic.
revision = '504f541411a5'
down_revision = '18cbdd3a7fd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
"comments",
sa.Column("assignee_type", sa.Text(), nullable=True)
)
def downgrade():
op.drop_column("comments", "assignee_type")
<commit_msg>Add licence header to migration<commit_after># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: ivan@reciprocitylabs.com
# Maintained By: ivan@reciprocitylabs.com
"""Comment assignee type
Revision ID: 504f541411a5
Revises: 18cbdd3a7fd9
Create Date: 2015-12-04 13:57:07.047217
"""
# revision identifiers, used by Alembic.
revision = '504f541411a5'
down_revision = '18cbdd3a7fd9'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
"comments",
sa.Column("assignee_type", sa.Text(), nullable=True)
)
def downgrade():
op.drop_column("comments", "assignee_type")
|
e692ea935713b21dbaefb8cf270831413b5f7bd2
|
mzalendo/core/management/commands/core_fix_ward_names.py
|
mzalendo/core/management/commands/core_fix_ward_names.py
|
import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
if new_version != ward.name:
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = 'ward-' + slugify(ward.name)
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
def slugify_place_name(place_name):
return 'ward-' + slugify(place_name)
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
new_version_slug = slugify_place_name(new_version)
if (new_version != ward.name) or (new_version_slug != ward.slug):
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = new_version_slug
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
Update ward names even if just the slug would have changed
|
Update ward names even if just the slug would have changed
|
Python
|
agpl-3.0
|
mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola,patricmutwiri/pombola,ken-muturi/pombola,geoffkilpin/pombola,patricmutwiri/pombola,ken-muturi/pombola,mysociety/pombola,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,mysociety/pombola,patricmutwiri/pombola,hzj123/56th,ken-muturi/pombola,geoffkilpin/pombola,geoffkilpin/pombola,mysociety/pombola,geoffkilpin/pombola,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,hzj123/56th,hzj123/56th,hzj123/56th,mysociety/pombola,ken-muturi/pombola,patricmutwiri/pombola,mysociety/pombola
|
import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
if new_version != ward.name:
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = 'ward-' + slugify(ward.name)
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
Update ward names even if just the slug would have changed
|
import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
def slugify_place_name(place_name):
return 'ward-' + slugify(place_name)
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
new_version_slug = slugify_place_name(new_version)
if (new_version != ward.name) or (new_version_slug != ward.slug):
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = new_version_slug
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
<commit_before>import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
if new_version != ward.name:
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = 'ward-' + slugify(ward.name)
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
<commit_msg>Update ward names even if just the slug would have changed<commit_after>
|
import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
def slugify_place_name(place_name):
return 'ward-' + slugify(place_name)
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
new_version_slug = slugify_place_name(new_version)
if (new_version != ward.name) or (new_version_slug != ward.slug):
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = new_version_slug
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
if new_version != ward.name:
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = 'ward-' + slugify(ward.name)
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
Update ward names even if just the slug would have changedimport re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
def slugify_place_name(place_name):
return 'ward-' + slugify(place_name)
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
new_version_slug = slugify_place_name(new_version)
if (new_version != ward.name) or (new_version_slug != ward.slug):
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = new_version_slug
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
<commit_before>import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
if new_version != ward.name:
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = 'ward-' + slugify(ward.name)
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
<commit_msg>Update ward names even if just the slug would have changed<commit_after>import re
from django.core.management.base import NoArgsCommand, CommandError
from django.template.defaultfilters import slugify
from optparse import make_option
from core.models import PlaceKind, Place
def slugify_place_name(place_name):
return 'ward-' + slugify(place_name)
class Command(NoArgsCommand):
help = 'Standardize the form of ward names with regard to / and - separators'
option_list = NoArgsCommand.option_list + (
make_option('--commit', action='store_true', dest='commit', help='Actually update the database'),
)
def handle_noargs(self, **options):
for ward in Place.objects.filter(kind=PlaceKind.objects.get(slug='ward')):
# print "ward is:", ward
new_version = re.sub(r'(\w) *([/-]) *(\w)', '\\1 \\2 \\3', ward.name)
new_version_slug = slugify_place_name(new_version)
if (new_version != ward.name) or (new_version_slug != ward.slug):
if options['commit']:
print "changing:", ward.name, "to", new_version
ward.name = new_version
ward.slug = new_version_slug
ward.save()
else:
print "would change:", ward.name, "to", new_version, "if --commit were specified"
|
d144e30d557ea2f4b03a2f0b7fb68f1cee54a602
|
cla_backend/apps/legalaid/migrations/0023_migrate_contact_for_research_via_field.py
|
cla_backend/apps/legalaid/migrations/0023_migrate_contact_for_research_via_field.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import Q
def migrate_contact_for_research_via_field_data(apps, schema_editor):
ContactResearchMethod = apps.get_model("legalaid", "ContactResearchMethod")
research_methods = {method.method: method.id for method in ContactResearchMethod.objects.all()}
PersonalDetails = apps.get_model("legalaid", "PersonalDetails")
models = PersonalDetails.objects.exclude(Q(contact_for_research_via="") | Q(contact_for_research_via=None))
for model in models:
if not list(model.contact_for_research_methods.all()):
model.contact_for_research_methods = [research_methods.get(model.contact_for_research_via)]
model.save()
def rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0022_default_contact_for_research_methods")]
operations = [
migrations.RunPython(
migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data
)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_contact_for_research_via_field_data(apps, schema_editor):
ContactResearchMethod = apps.get_model("legalaid", "ContactResearchMethod")
PersonalDetails = apps.get_model("legalaid", "PersonalDetails")
for method in ContactResearchMethod.objects.all():
details_qs = PersonalDetails.objects.filter(
contact_for_research_via=method.method, contact_for_research_methods__isnull=True
)
for details in details_qs:
details.contact_for_research_methods.add(method)
def rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0022_default_contact_for_research_methods")]
operations = [
migrations.RunPython(
migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data
)
]
|
Simplify data migration and make it safe to rerun
|
Simplify data migration and make it safe to rerun
|
Python
|
mit
|
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import Q
def migrate_contact_for_research_via_field_data(apps, schema_editor):
ContactResearchMethod = apps.get_model("legalaid", "ContactResearchMethod")
research_methods = {method.method: method.id for method in ContactResearchMethod.objects.all()}
PersonalDetails = apps.get_model("legalaid", "PersonalDetails")
models = PersonalDetails.objects.exclude(Q(contact_for_research_via="") | Q(contact_for_research_via=None))
for model in models:
if not list(model.contact_for_research_methods.all()):
model.contact_for_research_methods = [research_methods.get(model.contact_for_research_via)]
model.save()
def rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0022_default_contact_for_research_methods")]
operations = [
migrations.RunPython(
migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data
)
]
Simplify data migration and make it safe to rerun
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_contact_for_research_via_field_data(apps, schema_editor):
ContactResearchMethod = apps.get_model("legalaid", "ContactResearchMethod")
PersonalDetails = apps.get_model("legalaid", "PersonalDetails")
for method in ContactResearchMethod.objects.all():
details_qs = PersonalDetails.objects.filter(
contact_for_research_via=method.method, contact_for_research_methods__isnull=True
)
for details in details_qs:
details.contact_for_research_methods.add(method)
def rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0022_default_contact_for_research_methods")]
operations = [
migrations.RunPython(
migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data
)
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import Q
def migrate_contact_for_research_via_field_data(apps, schema_editor):
ContactResearchMethod = apps.get_model("legalaid", "ContactResearchMethod")
research_methods = {method.method: method.id for method in ContactResearchMethod.objects.all()}
PersonalDetails = apps.get_model("legalaid", "PersonalDetails")
models = PersonalDetails.objects.exclude(Q(contact_for_research_via="") | Q(contact_for_research_via=None))
for model in models:
if not list(model.contact_for_research_methods.all()):
model.contact_for_research_methods = [research_methods.get(model.contact_for_research_via)]
model.save()
def rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0022_default_contact_for_research_methods")]
operations = [
migrations.RunPython(
migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data
)
]
<commit_msg>Simplify data migration and make it safe to rerun<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_contact_for_research_via_field_data(apps, schema_editor):
ContactResearchMethod = apps.get_model("legalaid", "ContactResearchMethod")
PersonalDetails = apps.get_model("legalaid", "PersonalDetails")
for method in ContactResearchMethod.objects.all():
details_qs = PersonalDetails.objects.filter(
contact_for_research_via=method.method, contact_for_research_methods__isnull=True
)
for details in details_qs:
details.contact_for_research_methods.add(method)
def rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0022_default_contact_for_research_methods")]
operations = [
migrations.RunPython(
migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data
)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import Q
def migrate_contact_for_research_via_field_data(apps, schema_editor):
ContactResearchMethod = apps.get_model("legalaid", "ContactResearchMethod")
research_methods = {method.method: method.id for method in ContactResearchMethod.objects.all()}
PersonalDetails = apps.get_model("legalaid", "PersonalDetails")
models = PersonalDetails.objects.exclude(Q(contact_for_research_via="") | Q(contact_for_research_via=None))
for model in models:
if not list(model.contact_for_research_methods.all()):
model.contact_for_research_methods = [research_methods.get(model.contact_for_research_via)]
model.save()
def rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0022_default_contact_for_research_methods")]
operations = [
migrations.RunPython(
migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data
)
]
Simplify data migration and make it safe to rerun# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_contact_for_research_via_field_data(apps, schema_editor):
ContactResearchMethod = apps.get_model("legalaid", "ContactResearchMethod")
PersonalDetails = apps.get_model("legalaid", "PersonalDetails")
for method in ContactResearchMethod.objects.all():
details_qs = PersonalDetails.objects.filter(
contact_for_research_via=method.method, contact_for_research_methods__isnull=True
)
for details in details_qs:
details.contact_for_research_methods.add(method)
def rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0022_default_contact_for_research_methods")]
operations = [
migrations.RunPython(
migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data
)
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
from django.db.models import Q
def migrate_contact_for_research_via_field_data(apps, schema_editor):
ContactResearchMethod = apps.get_model("legalaid", "ContactResearchMethod")
research_methods = {method.method: method.id for method in ContactResearchMethod.objects.all()}
PersonalDetails = apps.get_model("legalaid", "PersonalDetails")
models = PersonalDetails.objects.exclude(Q(contact_for_research_via="") | Q(contact_for_research_via=None))
for model in models:
if not list(model.contact_for_research_methods.all()):
model.contact_for_research_methods = [research_methods.get(model.contact_for_research_via)]
model.save()
def rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0022_default_contact_for_research_methods")]
operations = [
migrations.RunPython(
migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data
)
]
<commit_msg>Simplify data migration and make it safe to rerun<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def migrate_contact_for_research_via_field_data(apps, schema_editor):
ContactResearchMethod = apps.get_model("legalaid", "ContactResearchMethod")
PersonalDetails = apps.get_model("legalaid", "PersonalDetails")
for method in ContactResearchMethod.objects.all():
details_qs = PersonalDetails.objects.filter(
contact_for_research_via=method.method, contact_for_research_methods__isnull=True
)
for details in details_qs:
details.contact_for_research_methods.add(method)
def rollback_migrate_contact_for_research_via_field_data(apps, schema_editor):
pass
class Migration(migrations.Migration):
dependencies = [("legalaid", "0022_default_contact_for_research_methods")]
operations = [
migrations.RunPython(
migrate_contact_for_research_via_field_data, rollback_migrate_contact_for_research_via_field_data
)
]
|
7d993541b9097062d922bdd8030f7ef1bcbb0129
|
apps/package/handlers/launchpad.py
|
apps/package/handlers/launchpad.py
|
import os
from django.conf import settings
from launchpadlib.launchpad import Launchpad
def pull(package):
cachedir = getattr(settings, 'LAUNCHPAD_CACHE_DIR', os.path.join(settings.PROJECT_ROOT, 'lp-cache'))
launchpad = Launchpad.login_anonymously('djangopackages.com', 'production', cachedir)
repo_name = package.repo_name().replace('https://code.launchpad.net/','')
branch = launchpad.branches.getByUrl(url='lp:%s' % repo_name)
package.repo_description = branch.description or ''
package.repo_forks = len(branch.project.getBranches())
package.repo_watchers = len(branch.subscribers)
package.participants = branch.owner.name
return package
|
import os
from django.conf import settings
from launchpadlib.launchpad import Launchpad
from package.handlers.base_handler import BaseHandler
class LaunchpadHandler(BaseHandler):
title = 'Launchpad'
url = 'https://code.launchpad.net'
user_url = 'https://launchpad.net/~%s'
repo_regex = r'https://code.launchpad.net/[\w\-\_]+/([\w\-\_]+)/[\w\-\_]+/{0,1}'
slug_regex = r'https://code.launchpad.net/[\w\-\_]+/([\w\-\_]+)/[\w\-\_]+/{0,1}'
def pull(self, package):
cachedir = getattr(settings, 'LAUNCHPAD_CACHE_DIR', os.path.join(settings.PROJECT_ROOT, 'lp-cache'))
launchpad = Launchpad.login_anonymously('djangopackages.com', 'production', cachedir)
repo_name = package.repo_name()
branch = launchpad.branches.getByUrl(url='lp:%s' % repo_name)
package.repo_description = branch.description or ''
package.repo_forks = len(branch.project.getBranches())
package.repo_watchers = len(branch.subscribers)
package.participants = branch.owner.name
return package
|
Refactor Launchpad handler to use BaseHandler.
|
Refactor Launchpad handler to use BaseHandler.
|
Python
|
mit
|
QLGu/djangopackages,pydanny/djangopackages,audreyr/opencomparison,nanuxbe/djangopackages,QLGu/djangopackages,miketheman/opencomparison,audreyr/opencomparison,QLGu/djangopackages,pydanny/djangopackages,cartwheelweb/packaginator,benracine/opencomparison,cartwheelweb/packaginator,nanuxbe/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,cartwheelweb/packaginator,miketheman/opencomparison,benracine/opencomparison
|
import os
from django.conf import settings
from launchpadlib.launchpad import Launchpad
def pull(package):
cachedir = getattr(settings, 'LAUNCHPAD_CACHE_DIR', os.path.join(settings.PROJECT_ROOT, 'lp-cache'))
launchpad = Launchpad.login_anonymously('djangopackages.com', 'production', cachedir)
repo_name = package.repo_name().replace('https://code.launchpad.net/','')
branch = launchpad.branches.getByUrl(url='lp:%s' % repo_name)
package.repo_description = branch.description or ''
package.repo_forks = len(branch.project.getBranches())
package.repo_watchers = len(branch.subscribers)
package.participants = branch.owner.name
return package
Refactor Launchpad handler to use BaseHandler.
|
import os
from django.conf import settings
from launchpadlib.launchpad import Launchpad
from package.handlers.base_handler import BaseHandler
class LaunchpadHandler(BaseHandler):
title = 'Launchpad'
url = 'https://code.launchpad.net'
user_url = 'https://launchpad.net/~%s'
repo_regex = r'https://code.launchpad.net/[\w\-\_]+/([\w\-\_]+)/[\w\-\_]+/{0,1}'
slug_regex = r'https://code.launchpad.net/[\w\-\_]+/([\w\-\_]+)/[\w\-\_]+/{0,1}'
def pull(self, package):
cachedir = getattr(settings, 'LAUNCHPAD_CACHE_DIR', os.path.join(settings.PROJECT_ROOT, 'lp-cache'))
launchpad = Launchpad.login_anonymously('djangopackages.com', 'production', cachedir)
repo_name = package.repo_name()
branch = launchpad.branches.getByUrl(url='lp:%s' % repo_name)
package.repo_description = branch.description or ''
package.repo_forks = len(branch.project.getBranches())
package.repo_watchers = len(branch.subscribers)
package.participants = branch.owner.name
return package
|
<commit_before>import os
from django.conf import settings
from launchpadlib.launchpad import Launchpad
def pull(package):
cachedir = getattr(settings, 'LAUNCHPAD_CACHE_DIR', os.path.join(settings.PROJECT_ROOT, 'lp-cache'))
launchpad = Launchpad.login_anonymously('djangopackages.com', 'production', cachedir)
repo_name = package.repo_name().replace('https://code.launchpad.net/','')
branch = launchpad.branches.getByUrl(url='lp:%s' % repo_name)
package.repo_description = branch.description or ''
package.repo_forks = len(branch.project.getBranches())
package.repo_watchers = len(branch.subscribers)
package.participants = branch.owner.name
return package
<commit_msg>Refactor Launchpad handler to use BaseHandler.<commit_after>
|
import os
from django.conf import settings
from launchpadlib.launchpad import Launchpad
from package.handlers.base_handler import BaseHandler
class LaunchpadHandler(BaseHandler):
title = 'Launchpad'
url = 'https://code.launchpad.net'
user_url = 'https://launchpad.net/~%s'
repo_regex = r'https://code.launchpad.net/[\w\-\_]+/([\w\-\_]+)/[\w\-\_]+/{0,1}'
slug_regex = r'https://code.launchpad.net/[\w\-\_]+/([\w\-\_]+)/[\w\-\_]+/{0,1}'
def pull(self, package):
cachedir = getattr(settings, 'LAUNCHPAD_CACHE_DIR', os.path.join(settings.PROJECT_ROOT, 'lp-cache'))
launchpad = Launchpad.login_anonymously('djangopackages.com', 'production', cachedir)
repo_name = package.repo_name()
branch = launchpad.branches.getByUrl(url='lp:%s' % repo_name)
package.repo_description = branch.description or ''
package.repo_forks = len(branch.project.getBranches())
package.repo_watchers = len(branch.subscribers)
package.participants = branch.owner.name
return package
|
import os
from django.conf import settings
from launchpadlib.launchpad import Launchpad
def pull(package):
cachedir = getattr(settings, 'LAUNCHPAD_CACHE_DIR', os.path.join(settings.PROJECT_ROOT, 'lp-cache'))
launchpad = Launchpad.login_anonymously('djangopackages.com', 'production', cachedir)
repo_name = package.repo_name().replace('https://code.launchpad.net/','')
branch = launchpad.branches.getByUrl(url='lp:%s' % repo_name)
package.repo_description = branch.description or ''
package.repo_forks = len(branch.project.getBranches())
package.repo_watchers = len(branch.subscribers)
package.participants = branch.owner.name
return package
Refactor Launchpad handler to use BaseHandler.import os
from django.conf import settings
from launchpadlib.launchpad import Launchpad
from package.handlers.base_handler import BaseHandler
class LaunchpadHandler(BaseHandler):
title = 'Launchpad'
url = 'https://code.launchpad.net'
user_url = 'https://launchpad.net/~%s'
repo_regex = r'https://code.launchpad.net/[\w\-\_]+/([\w\-\_]+)/[\w\-\_]+/{0,1}'
slug_regex = r'https://code.launchpad.net/[\w\-\_]+/([\w\-\_]+)/[\w\-\_]+/{0,1}'
def pull(self, package):
cachedir = getattr(settings, 'LAUNCHPAD_CACHE_DIR', os.path.join(settings.PROJECT_ROOT, 'lp-cache'))
launchpad = Launchpad.login_anonymously('djangopackages.com', 'production', cachedir)
repo_name = package.repo_name()
branch = launchpad.branches.getByUrl(url='lp:%s' % repo_name)
package.repo_description = branch.description or ''
package.repo_forks = len(branch.project.getBranches())
package.repo_watchers = len(branch.subscribers)
package.participants = branch.owner.name
return package
|
<commit_before>import os
from django.conf import settings
from launchpadlib.launchpad import Launchpad
def pull(package):
cachedir = getattr(settings, 'LAUNCHPAD_CACHE_DIR', os.path.join(settings.PROJECT_ROOT, 'lp-cache'))
launchpad = Launchpad.login_anonymously('djangopackages.com', 'production', cachedir)
repo_name = package.repo_name().replace('https://code.launchpad.net/','')
branch = launchpad.branches.getByUrl(url='lp:%s' % repo_name)
package.repo_description = branch.description or ''
package.repo_forks = len(branch.project.getBranches())
package.repo_watchers = len(branch.subscribers)
package.participants = branch.owner.name
return package
<commit_msg>Refactor Launchpad handler to use BaseHandler.<commit_after>import os
from django.conf import settings
from launchpadlib.launchpad import Launchpad
from package.handlers.base_handler import BaseHandler
class LaunchpadHandler(BaseHandler):
title = 'Launchpad'
url = 'https://code.launchpad.net'
user_url = 'https://launchpad.net/~%s'
repo_regex = r'https://code.launchpad.net/[\w\-\_]+/([\w\-\_]+)/[\w\-\_]+/{0,1}'
slug_regex = r'https://code.launchpad.net/[\w\-\_]+/([\w\-\_]+)/[\w\-\_]+/{0,1}'
def pull(self, package):
cachedir = getattr(settings, 'LAUNCHPAD_CACHE_DIR', os.path.join(settings.PROJECT_ROOT, 'lp-cache'))
launchpad = Launchpad.login_anonymously('djangopackages.com', 'production', cachedir)
repo_name = package.repo_name()
branch = launchpad.branches.getByUrl(url='lp:%s' % repo_name)
package.repo_description = branch.description or ''
package.repo_forks = len(branch.project.getBranches())
package.repo_watchers = len(branch.subscribers)
package.participants = branch.owner.name
return package
|
c80424ca0dd8d748f7da167a588582ee94986da0
|
polling_stations/apps/data_importers/management/commands/import_highland.py
|
polling_stations/apps/data_importers/management/commands/import_highland.py
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "HLD"
addresses_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Districts.csv"
)
stations_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Stations.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.uprn in ["130131593"]:
return None
if record.postcode == "IV17 0QY":
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.stationcode in ["W01 001", "W05 053"]:
return None
return super().station_record_to_dict(record)
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "HLD"
addresses_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Districts.csv"
)
stations_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Stations.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.uprn in ["130131593"]:
return None
if record.postcode == "IV17 0QY":
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.stationcode in ["W01 001", "W05 053"]:
record = record._replace(xordinate="", yordinate="")
return super().station_record_to_dict(record)
|
Set don't use station coords for geocoding
|
Set don't use station coords for geocoding
|
Python
|
bsd-3-clause
|
DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "HLD"
addresses_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Districts.csv"
)
stations_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Stations.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.uprn in ["130131593"]:
return None
if record.postcode == "IV17 0QY":
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.stationcode in ["W01 001", "W05 053"]:
return None
return super().station_record_to_dict(record)
Set don't use station coords for geocoding
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "HLD"
addresses_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Districts.csv"
)
stations_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Stations.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.uprn in ["130131593"]:
return None
if record.postcode == "IV17 0QY":
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.stationcode in ["W01 001", "W05 053"]:
record = record._replace(xordinate="", yordinate="")
return super().station_record_to_dict(record)
|
<commit_before>from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "HLD"
addresses_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Districts.csv"
)
stations_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Stations.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.uprn in ["130131593"]:
return None
if record.postcode == "IV17 0QY":
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.stationcode in ["W01 001", "W05 053"]:
return None
return super().station_record_to_dict(record)
<commit_msg>Set don't use station coords for geocoding<commit_after>
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "HLD"
addresses_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Districts.csv"
)
stations_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Stations.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.uprn in ["130131593"]:
return None
if record.postcode == "IV17 0QY":
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.stationcode in ["W01 001", "W05 053"]:
record = record._replace(xordinate="", yordinate="")
return super().station_record_to_dict(record)
|
from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "HLD"
addresses_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Districts.csv"
)
stations_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Stations.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.uprn in ["130131593"]:
return None
if record.postcode == "IV17 0QY":
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.stationcode in ["W01 001", "W05 053"]:
return None
return super().station_record_to_dict(record)
Set don't use station coords for geocodingfrom data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "HLD"
addresses_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Districts.csv"
)
stations_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Stations.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.uprn in ["130131593"]:
return None
if record.postcode == "IV17 0QY":
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.stationcode in ["W01 001", "W05 053"]:
record = record._replace(xordinate="", yordinate="")
return super().station_record_to_dict(record)
|
<commit_before>from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "HLD"
addresses_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Districts.csv"
)
stations_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Stations.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.uprn in ["130131593"]:
return None
if record.postcode == "IV17 0QY":
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.stationcode in ["W01 001", "W05 053"]:
return None
return super().station_record_to_dict(record)
<commit_msg>Set don't use station coords for geocoding<commit_after>from data_importers.management.commands import BaseDemocracyCountsCsvImporter
class Command(BaseDemocracyCountsCsvImporter):
council_id = "HLD"
addresses_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Districts.csv"
)
stations_name = (
"2022-05-05/2022-03-24T15:19:09.669660/H Democracy Club - Polling Stations.csv"
)
elections = ["2022-05-05"]
def address_record_to_dict(self, record):
if record.uprn in ["130131593"]:
return None
if record.postcode == "IV17 0QY":
return None
return super().address_record_to_dict(record)
def station_record_to_dict(self, record):
if record.stationcode in ["W01 001", "W05 053"]:
record = record._replace(xordinate="", yordinate="")
return super().station_record_to_dict(record)
|
9ef92435a94d01d963b25e10bfb681daf04df193
|
dbaas/integrations/iaas/manager.py
|
dbaas/integrations/iaas/manager.py
|
from dbaas_cloudstack.provider import CloudStackProvider
from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider
from integrations.monitoring.manager import MonitoringManager
import logging
LOG = logging.getLogger(__name__)
class IaaSManager():
@classmethod
def destroy_instance(cls, database, *args, **kwargs):
plan = database.plan
provider = plan.provider
if provider == plan.PREPROVISIONED:
LOG.info("Destroying pre provisioned database...")
PreProvisionedProvider().destroy_instance(database, *args, **kwargs)
elif provider == plan.CLOUDSTACK:
LOG.info("Destroying cloud stack instance...")
MonitoringManager.remove_monitoring(database.databaseinfra)
CloudStackProvider().destroy_instance(database, *args, **kwargs)
@classmethod
def create_instance(cls, plan, environment, name):
if plan.provider == plan.PREPROVISIONED:
LOG.info("Creating pre provisioned instance...")
return PreProvisionedProvider().create_instance(plan, environment)
elif plan.provider == plan.CLOUDSTACK:
LOG.info("Creating cloud stack instance...")
databaseinfra = CloudStackProvider().create_instance(plan, environment, name)
if databaseinfra is not None:
MonitoringManager.create_monitoring(databaseinfra)
return databaseinfra
|
from dbaas_cloudstack.provider import CloudStackProvider
from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider
from integrations.monitoring.manager import MonitoringManager
import logging
LOG = logging.getLogger(__name__)
class IaaSManager():
@classmethod
def destroy_instance(cls, database, *args, **kwargs):
plan = database.plan
provider = plan.provider
if provider == plan.PREPROVISIONED:
LOG.info("Destroying pre provisioned database...")
PreProvisionedProvider().destroy_instance(database, *args, **kwargs)
elif provider == plan.CLOUDSTACK:
LOG.info("Destroying cloud stack instance...")
if database.is_in_quarantine:
MonitoringManager.remove_monitoring(database.databaseinfra)
CloudStackProvider().destroy_instance(database, *args, **kwargs)
@classmethod
def create_instance(cls, plan, environment, name):
if plan.provider == plan.PREPROVISIONED:
LOG.info("Creating pre provisioned instance...")
return PreProvisionedProvider().create_instance(plan, environment)
elif plan.provider == plan.CLOUDSTACK:
LOG.info("Creating cloud stack instance...")
databaseinfra = CloudStackProvider().create_instance(plan, environment, name)
if databaseinfra is not None:
MonitoringManager.create_monitoring(databaseinfra)
return databaseinfra
|
Remove monitoring only after database quarantine
|
Remove monitoring only after database quarantine
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
from dbaas_cloudstack.provider import CloudStackProvider
from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider
from integrations.monitoring.manager import MonitoringManager
import logging
LOG = logging.getLogger(__name__)
class IaaSManager():
@classmethod
def destroy_instance(cls, database, *args, **kwargs):
plan = database.plan
provider = plan.provider
if provider == plan.PREPROVISIONED:
LOG.info("Destroying pre provisioned database...")
PreProvisionedProvider().destroy_instance(database, *args, **kwargs)
elif provider == plan.CLOUDSTACK:
LOG.info("Destroying cloud stack instance...")
MonitoringManager.remove_monitoring(database.databaseinfra)
CloudStackProvider().destroy_instance(database, *args, **kwargs)
@classmethod
def create_instance(cls, plan, environment, name):
if plan.provider == plan.PREPROVISIONED:
LOG.info("Creating pre provisioned instance...")
return PreProvisionedProvider().create_instance(plan, environment)
elif plan.provider == plan.CLOUDSTACK:
LOG.info("Creating cloud stack instance...")
databaseinfra = CloudStackProvider().create_instance(plan, environment, name)
if databaseinfra is not None:
MonitoringManager.create_monitoring(databaseinfra)
return databaseinfra
Remove monitoring only after database quarantine
|
from dbaas_cloudstack.provider import CloudStackProvider
from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider
from integrations.monitoring.manager import MonitoringManager
import logging
LOG = logging.getLogger(__name__)
class IaaSManager():
@classmethod
def destroy_instance(cls, database, *args, **kwargs):
plan = database.plan
provider = plan.provider
if provider == plan.PREPROVISIONED:
LOG.info("Destroying pre provisioned database...")
PreProvisionedProvider().destroy_instance(database, *args, **kwargs)
elif provider == plan.CLOUDSTACK:
LOG.info("Destroying cloud stack instance...")
if database.is_in_quarantine:
MonitoringManager.remove_monitoring(database.databaseinfra)
CloudStackProvider().destroy_instance(database, *args, **kwargs)
@classmethod
def create_instance(cls, plan, environment, name):
if plan.provider == plan.PREPROVISIONED:
LOG.info("Creating pre provisioned instance...")
return PreProvisionedProvider().create_instance(plan, environment)
elif plan.provider == plan.CLOUDSTACK:
LOG.info("Creating cloud stack instance...")
databaseinfra = CloudStackProvider().create_instance(plan, environment, name)
if databaseinfra is not None:
MonitoringManager.create_monitoring(databaseinfra)
return databaseinfra
|
<commit_before>from dbaas_cloudstack.provider import CloudStackProvider
from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider
from integrations.monitoring.manager import MonitoringManager
import logging
LOG = logging.getLogger(__name__)
class IaaSManager():
@classmethod
def destroy_instance(cls, database, *args, **kwargs):
plan = database.plan
provider = plan.provider
if provider == plan.PREPROVISIONED:
LOG.info("Destroying pre provisioned database...")
PreProvisionedProvider().destroy_instance(database, *args, **kwargs)
elif provider == plan.CLOUDSTACK:
LOG.info("Destroying cloud stack instance...")
MonitoringManager.remove_monitoring(database.databaseinfra)
CloudStackProvider().destroy_instance(database, *args, **kwargs)
@classmethod
def create_instance(cls, plan, environment, name):
if plan.provider == plan.PREPROVISIONED:
LOG.info("Creating pre provisioned instance...")
return PreProvisionedProvider().create_instance(plan, environment)
elif plan.provider == plan.CLOUDSTACK:
LOG.info("Creating cloud stack instance...")
databaseinfra = CloudStackProvider().create_instance(plan, environment, name)
if databaseinfra is not None:
MonitoringManager.create_monitoring(databaseinfra)
return databaseinfra
<commit_msg>Remove monitoring only after database quarantine<commit_after>
|
from dbaas_cloudstack.provider import CloudStackProvider
from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider
from integrations.monitoring.manager import MonitoringManager
import logging
LOG = logging.getLogger(__name__)
class IaaSManager():
@classmethod
def destroy_instance(cls, database, *args, **kwargs):
plan = database.plan
provider = plan.provider
if provider == plan.PREPROVISIONED:
LOG.info("Destroying pre provisioned database...")
PreProvisionedProvider().destroy_instance(database, *args, **kwargs)
elif provider == plan.CLOUDSTACK:
LOG.info("Destroying cloud stack instance...")
if database.is_in_quarantine:
MonitoringManager.remove_monitoring(database.databaseinfra)
CloudStackProvider().destroy_instance(database, *args, **kwargs)
@classmethod
def create_instance(cls, plan, environment, name):
if plan.provider == plan.PREPROVISIONED:
LOG.info("Creating pre provisioned instance...")
return PreProvisionedProvider().create_instance(plan, environment)
elif plan.provider == plan.CLOUDSTACK:
LOG.info("Creating cloud stack instance...")
databaseinfra = CloudStackProvider().create_instance(plan, environment, name)
if databaseinfra is not None:
MonitoringManager.create_monitoring(databaseinfra)
return databaseinfra
|
from dbaas_cloudstack.provider import CloudStackProvider
from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider
from integrations.monitoring.manager import MonitoringManager
import logging
LOG = logging.getLogger(__name__)
class IaaSManager():
@classmethod
def destroy_instance(cls, database, *args, **kwargs):
plan = database.plan
provider = plan.provider
if provider == plan.PREPROVISIONED:
LOG.info("Destroying pre provisioned database...")
PreProvisionedProvider().destroy_instance(database, *args, **kwargs)
elif provider == plan.CLOUDSTACK:
LOG.info("Destroying cloud stack instance...")
MonitoringManager.remove_monitoring(database.databaseinfra)
CloudStackProvider().destroy_instance(database, *args, **kwargs)
@classmethod
def create_instance(cls, plan, environment, name):
if plan.provider == plan.PREPROVISIONED:
LOG.info("Creating pre provisioned instance...")
return PreProvisionedProvider().create_instance(plan, environment)
elif plan.provider == plan.CLOUDSTACK:
LOG.info("Creating cloud stack instance...")
databaseinfra = CloudStackProvider().create_instance(plan, environment, name)
if databaseinfra is not None:
MonitoringManager.create_monitoring(databaseinfra)
return databaseinfra
Remove monitoring only after database quarantinefrom dbaas_cloudstack.provider import CloudStackProvider
from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider
from integrations.monitoring.manager import MonitoringManager
import logging
LOG = logging.getLogger(__name__)
class IaaSManager():
@classmethod
def destroy_instance(cls, database, *args, **kwargs):
plan = database.plan
provider = plan.provider
if provider == plan.PREPROVISIONED:
LOG.info("Destroying pre provisioned database...")
PreProvisionedProvider().destroy_instance(database, *args, **kwargs)
elif provider == plan.CLOUDSTACK:
LOG.info("Destroying cloud stack instance...")
if database.is_in_quarantine:
MonitoringManager.remove_monitoring(database.databaseinfra)
CloudStackProvider().destroy_instance(database, *args, **kwargs)
@classmethod
def create_instance(cls, plan, environment, name):
if plan.provider == plan.PREPROVISIONED:
LOG.info("Creating pre provisioned instance...")
return PreProvisionedProvider().create_instance(plan, environment)
elif plan.provider == plan.CLOUDSTACK:
LOG.info("Creating cloud stack instance...")
databaseinfra = CloudStackProvider().create_instance(plan, environment, name)
if databaseinfra is not None:
MonitoringManager.create_monitoring(databaseinfra)
return databaseinfra
|
<commit_before>from dbaas_cloudstack.provider import CloudStackProvider
from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider
from integrations.monitoring.manager import MonitoringManager
import logging
LOG = logging.getLogger(__name__)
class IaaSManager():
@classmethod
def destroy_instance(cls, database, *args, **kwargs):
plan = database.plan
provider = plan.provider
if provider == plan.PREPROVISIONED:
LOG.info("Destroying pre provisioned database...")
PreProvisionedProvider().destroy_instance(database, *args, **kwargs)
elif provider == plan.CLOUDSTACK:
LOG.info("Destroying cloud stack instance...")
MonitoringManager.remove_monitoring(database.databaseinfra)
CloudStackProvider().destroy_instance(database, *args, **kwargs)
@classmethod
def create_instance(cls, plan, environment, name):
if plan.provider == plan.PREPROVISIONED:
LOG.info("Creating pre provisioned instance...")
return PreProvisionedProvider().create_instance(plan, environment)
elif plan.provider == plan.CLOUDSTACK:
LOG.info("Creating cloud stack instance...")
databaseinfra = CloudStackProvider().create_instance(plan, environment, name)
if databaseinfra is not None:
MonitoringManager.create_monitoring(databaseinfra)
return databaseinfra
<commit_msg>Remove monitoring only after database quarantine<commit_after>from dbaas_cloudstack.provider import CloudStackProvider
from pre_provisioned.pre_provisioned_provider import PreProvisionedProvider
from integrations.monitoring.manager import MonitoringManager
import logging
LOG = logging.getLogger(__name__)
class IaaSManager():
@classmethod
def destroy_instance(cls, database, *args, **kwargs):
plan = database.plan
provider = plan.provider
if provider == plan.PREPROVISIONED:
LOG.info("Destroying pre provisioned database...")
PreProvisionedProvider().destroy_instance(database, *args, **kwargs)
elif provider == plan.CLOUDSTACK:
LOG.info("Destroying cloud stack instance...")
if database.is_in_quarantine:
MonitoringManager.remove_monitoring(database.databaseinfra)
CloudStackProvider().destroy_instance(database, *args, **kwargs)
@classmethod
def create_instance(cls, plan, environment, name):
if plan.provider == plan.PREPROVISIONED:
LOG.info("Creating pre provisioned instance...")
return PreProvisionedProvider().create_instance(plan, environment)
elif plan.provider == plan.CLOUDSTACK:
LOG.info("Creating cloud stack instance...")
databaseinfra = CloudStackProvider().create_instance(plan, environment, name)
if databaseinfra is not None:
MonitoringManager.create_monitoring(databaseinfra)
return databaseinfra
|
5e1e85c3de2183c3bf0d497a161356044e1e36c1
|
admin.py
|
admin.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
search_fields = ('message', 'extra_tags')
list_display = ('message', 'extra_tags',
'level', 'is_active',
'crdate', 'tstamp')
list_filter = ('level', 'is_active',
'crdate', 'tstamp')
date_hierarchy = 'crdate'
admin.site.register(Announcement, AnnouncementAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
search_fields = ('message', 'extra_tags')
list_display = ('message', 'extra_tags',
'level', 'is_active',
'crdate', 'tstamp')
list_filter = ('level', 'is_active',
'crdate', 'tstamp')
#date_hierarchy = 'crdate'
admin.site.register(Announcement, AnnouncementAdmin)
|
Disable date_hierarchy for now since it requires tzinfo in MySQL
|
Disable date_hierarchy for now since it requires tzinfo in MySQL
|
Python
|
mit
|
mback2k/django-app-announcements
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
search_fields = ('message', 'extra_tags')
list_display = ('message', 'extra_tags',
'level', 'is_active',
'crdate', 'tstamp')
list_filter = ('level', 'is_active',
'crdate', 'tstamp')
date_hierarchy = 'crdate'
admin.site.register(Announcement, AnnouncementAdmin)
Disable date_hierarchy for now since it requires tzinfo in MySQL
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
search_fields = ('message', 'extra_tags')
list_display = ('message', 'extra_tags',
'level', 'is_active',
'crdate', 'tstamp')
list_filter = ('level', 'is_active',
'crdate', 'tstamp')
#date_hierarchy = 'crdate'
admin.site.register(Announcement, AnnouncementAdmin)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
search_fields = ('message', 'extra_tags')
list_display = ('message', 'extra_tags',
'level', 'is_active',
'crdate', 'tstamp')
list_filter = ('level', 'is_active',
'crdate', 'tstamp')
date_hierarchy = 'crdate'
admin.site.register(Announcement, AnnouncementAdmin)
<commit_msg>Disable date_hierarchy for now since it requires tzinfo in MySQL<commit_after>
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
search_fields = ('message', 'extra_tags')
list_display = ('message', 'extra_tags',
'level', 'is_active',
'crdate', 'tstamp')
list_filter = ('level', 'is_active',
'crdate', 'tstamp')
#date_hierarchy = 'crdate'
admin.site.register(Announcement, AnnouncementAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
search_fields = ('message', 'extra_tags')
list_display = ('message', 'extra_tags',
'level', 'is_active',
'crdate', 'tstamp')
list_filter = ('level', 'is_active',
'crdate', 'tstamp')
date_hierarchy = 'crdate'
admin.site.register(Announcement, AnnouncementAdmin)
Disable date_hierarchy for now since it requires tzinfo in MySQL# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
search_fields = ('message', 'extra_tags')
list_display = ('message', 'extra_tags',
'level', 'is_active',
'crdate', 'tstamp')
list_filter = ('level', 'is_active',
'crdate', 'tstamp')
#date_hierarchy = 'crdate'
admin.site.register(Announcement, AnnouncementAdmin)
|
<commit_before># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
search_fields = ('message', 'extra_tags')
list_display = ('message', 'extra_tags',
'level', 'is_active',
'crdate', 'tstamp')
list_filter = ('level', 'is_active',
'crdate', 'tstamp')
date_hierarchy = 'crdate'
admin.site.register(Announcement, AnnouncementAdmin)
<commit_msg>Disable date_hierarchy for now since it requires tzinfo in MySQL<commit_after># -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Announcement
class AnnouncementAdmin(admin.ModelAdmin):
search_fields = ('message', 'extra_tags')
list_display = ('message', 'extra_tags',
'level', 'is_active',
'crdate', 'tstamp')
list_filter = ('level', 'is_active',
'crdate', 'tstamp')
#date_hierarchy = 'crdate'
admin.site.register(Announcement, AnnouncementAdmin)
|
9c4ecf0b72d86ae113fa13f210c543120635b73e
|
board.py
|
board.py
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
    """A four-in-a-row game board stored as a rows x columns matrix.

    Cell value 0 means empty; any non-zero value is a player's piece.
    Row 0 is the top of the board, row ``rows - 1`` the bottom.
    """

    def __init__(self, rows, columns):
        """Initialize an empty board with the given dimensions."""
        # Bug fix: original spelled the constructor `__init` (missing trailing
        # underscores), so it was never called and instances had no attributes.
        self.rows = rows
        self.columns = columns
        self.boardMatrix = numpy.zeros((rows, columns))

    def addPiece(self, column, value):
        """Drop a piece with *value* into *column*.

        Returns False when the column is full (top cell occupied); otherwise
        the piece falls to the lowest empty cell and True is returned.
        """
        # Column is full when its top cell is already occupied.
        if self.boardMatrix.item(0, column) != 0:
            return False  # bug fix: was lowercase `false` (NameError)
        # Walk down from the top; the piece rests just above the first
        # occupied cell. Bug fix: the original only ever wrote the bottom
        # row (and referenced bare `rows` instead of `self.rows`), so a
        # piece dropped onto a non-empty column was silently lost.
        for y in range(self.rows):
            if self.boardMatrix.item(y, column) != 0:
                self.boardMatrix.itemset((y - 1, column), value)
                return True
        # Whole column empty: the piece falls to the bottom row.
        self.boardMatrix.itemset((self.rows - 1, column), value)
        return True
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
currentValue = self.boardMatrix.item(y, column)
if currentValue == 0:
if y == self.rows - 1:
self.boardMatrix.itemset((y, column), value)
else:
continue
return True
|
Fix init method, self.rows and capital booleans.
|
Fix init method, self.rows and capital booleans.
|
Python
|
mit
|
isaacarvestad/four-in-a-row
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return false
"Place piece."
for y in range(self.rows):
currentValue = self.boardMatrix.item(y, column)
if currentValue == 0:
if y == rows - 1:
self.boardMatrix.itemset((y, column), value)
else:
continue
return true
Fix init method, self.rows and capital booleans.
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
currentValue = self.boardMatrix.item(y, column)
if currentValue == 0:
if y == self.rows - 1:
self.boardMatrix.itemset((y, column), value)
else:
continue
return True
|
<commit_before>import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return false
"Place piece."
for y in range(self.rows):
currentValue = self.boardMatrix.item(y, column)
if currentValue == 0:
if y == rows - 1:
self.boardMatrix.itemset((y, column), value)
else:
continue
return true
<commit_msg>Fix init method, self.rows and capital booleans.<commit_after>
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
currentValue = self.boardMatrix.item(y, column)
if currentValue == 0:
if y == self.rows - 1:
self.boardMatrix.itemset((y, column), value)
else:
continue
return True
|
import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return false
"Place piece."
for y in range(self.rows):
currentValue = self.boardMatrix.item(y, column)
if currentValue == 0:
if y == rows - 1:
self.boardMatrix.itemset((y, column), value)
else:
continue
return true
Fix init method, self.rows and capital booleans.import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
currentValue = self.boardMatrix.item(y, column)
if currentValue == 0:
if y == self.rows - 1:
self.boardMatrix.itemset((y, column), value)
else:
continue
return True
|
<commit_before>import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return false
"Place piece."
for y in range(self.rows):
currentValue = self.boardMatrix.item(y, column)
if currentValue == 0:
if y == rows - 1:
self.boardMatrix.itemset((y, column), value)
else:
continue
return true
<commit_msg>Fix init method, self.rows and capital booleans.<commit_after>import numpy
"""
Board represents a four in a row game board.
Author: Isaac Arvestad
"""
class Board:
"""
Initializes the game with a certain number of rows
and columns.
"""
def __init__(self, rows, columns):
self.rows = rows
self.columns = columns
self.boardMatrix = numpy.zeros((rows, columns))
"""
Attempts to add a piece to a certain column. If the column is
full the move is illegal and false is returned, otherwise true
is returned.
"""
def addPiece(self, column, value):
"Check if column is full."
if self.boardMatrix.item(0,column) != 0:
return False
"Place piece."
for y in range(self.rows):
currentValue = self.boardMatrix.item(y, column)
if currentValue == 0:
if y == self.rows - 1:
self.boardMatrix.itemset((y, column), value)
else:
continue
return True
|
73f7f64ff5a29d5fa007ad44f2d68c6dc2ae65d7
|
sql/src/test/BugTracker/Tests/connect_crash.SF-1436626.py
|
sql/src/test/BugTracker/Tests/connect_crash.SF-1436626.py
|
import os, time
def main():
    # Regression test for SF bug 1436626: a client connecting and issuing a
    # query must not crash the server.
    # Start the MonetDB server on the test database; "include sql;" loads
    # the SQL module at startup.
    srvcmd = '%s --dbname "%s" --dbinit "include sql;"' % (os.getenv('MSERVER'),os.getenv('TSTDB'))
    # NOTE(review): os.popen only pipes the child's stdin here; child output
    # goes to the inherited streams — confirm that is intended on all
    # platforms.
    srv = os.popen(srvcmd, 'w')
    time.sleep(10) # give server time to start
    cltcmd = os.getenv('SQL_CLIENT')
    clt = os.popen(cltcmd, 'w')
    clt.write('select 1;\n')
    clt.close()
    srv.close()
main()
|
import subprocess, os, time
def main():
srvcmd = '%s --dbname "%s" --dbinit "include sql;"' % (os.getenv('MSERVER'),os.getenv('TSTDB'))
srv = subprocess.Popen(srvcmd, shell = True, stdin = subprocess.PIPE)
time.sleep(10) # give server time to start
cltcmd = os.getenv('SQL_CLIENT')
clt = subprocess.Popen(cltcmd, shell = True, stdin = subprocess.PIPE)
clt.stdin.write('select 1;\n')
clt.communicate()
srv.communicate()
main()
|
Use the subprocess module to start processes with pipes. This seems to fix the lack of output on Windows.
|
Use the subprocess module to start processes with pipes.
This seems to fix the lack of output on Windows.
|
Python
|
mpl-2.0
|
zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb,zyzyis/monetdb
|
import os, time
def main():
srvcmd = '%s --dbname "%s" --dbinit "include sql;"' % (os.getenv('MSERVER'),os.getenv('TSTDB'))
srv = os.popen(srvcmd, 'w')
time.sleep(10) # give server time to start
cltcmd = os.getenv('SQL_CLIENT')
clt = os.popen(cltcmd, 'w')
clt.write('select 1;\n')
clt.close()
srv.close()
main()
Use the subprocess module to start processes with pipes.
This seems to fix the lack of output on Windows.
|
import subprocess, os, time
def main():
srvcmd = '%s --dbname "%s" --dbinit "include sql;"' % (os.getenv('MSERVER'),os.getenv('TSTDB'))
srv = subprocess.Popen(srvcmd, shell = True, stdin = subprocess.PIPE)
time.sleep(10) # give server time to start
cltcmd = os.getenv('SQL_CLIENT')
clt = subprocess.Popen(cltcmd, shell = True, stdin = subprocess.PIPE)
clt.stdin.write('select 1;\n')
clt.communicate()
srv.communicate()
main()
|
<commit_before>import os, time
def main():
srvcmd = '%s --dbname "%s" --dbinit "include sql;"' % (os.getenv('MSERVER'),os.getenv('TSTDB'))
srv = os.popen(srvcmd, 'w')
time.sleep(10) # give server time to start
cltcmd = os.getenv('SQL_CLIENT')
clt = os.popen(cltcmd, 'w')
clt.write('select 1;\n')
clt.close()
srv.close()
main()
<commit_msg>Use the subprocess module to start processes with pipes.
This seems to fix the lack of output on Windows.<commit_after>
|
import subprocess, os, time
def main():
srvcmd = '%s --dbname "%s" --dbinit "include sql;"' % (os.getenv('MSERVER'),os.getenv('TSTDB'))
srv = subprocess.Popen(srvcmd, shell = True, stdin = subprocess.PIPE)
time.sleep(10) # give server time to start
cltcmd = os.getenv('SQL_CLIENT')
clt = subprocess.Popen(cltcmd, shell = True, stdin = subprocess.PIPE)
clt.stdin.write('select 1;\n')
clt.communicate()
srv.communicate()
main()
|
import os, time
def main():
srvcmd = '%s --dbname "%s" --dbinit "include sql;"' % (os.getenv('MSERVER'),os.getenv('TSTDB'))
srv = os.popen(srvcmd, 'w')
time.sleep(10) # give server time to start
cltcmd = os.getenv('SQL_CLIENT')
clt = os.popen(cltcmd, 'w')
clt.write('select 1;\n')
clt.close()
srv.close()
main()
Use the subprocess module to start processes with pipes.
This seems to fix the lack of output on Windows.import subprocess, os, time
def main():
srvcmd = '%s --dbname "%s" --dbinit "include sql;"' % (os.getenv('MSERVER'),os.getenv('TSTDB'))
srv = subprocess.Popen(srvcmd, shell = True, stdin = subprocess.PIPE)
time.sleep(10) # give server time to start
cltcmd = os.getenv('SQL_CLIENT')
clt = subprocess.Popen(cltcmd, shell = True, stdin = subprocess.PIPE)
clt.stdin.write('select 1;\n')
clt.communicate()
srv.communicate()
main()
|
<commit_before>import os, time
def main():
srvcmd = '%s --dbname "%s" --dbinit "include sql;"' % (os.getenv('MSERVER'),os.getenv('TSTDB'))
srv = os.popen(srvcmd, 'w')
time.sleep(10) # give server time to start
cltcmd = os.getenv('SQL_CLIENT')
clt = os.popen(cltcmd, 'w')
clt.write('select 1;\n')
clt.close()
srv.close()
main()
<commit_msg>Use the subprocess module to start processes with pipes.
This seems to fix the lack of output on Windows.<commit_after>import subprocess, os, time
def main():
srvcmd = '%s --dbname "%s" --dbinit "include sql;"' % (os.getenv('MSERVER'),os.getenv('TSTDB'))
srv = subprocess.Popen(srvcmd, shell = True, stdin = subprocess.PIPE)
time.sleep(10) # give server time to start
cltcmd = os.getenv('SQL_CLIENT')
clt = subprocess.Popen(cltcmd, shell = True, stdin = subprocess.PIPE)
clt.stdin.write('select 1;\n')
clt.communicate()
srv.communicate()
main()
|
20541c6c22f08c24ee0b984b34d66acf03f3a529
|
src/pythonModules/fourgp_cannon/fourgp_cannon/__init__.py
|
src/pythonModules/fourgp_cannon/fourgp_cannon/__init__.py
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import logging
from numpy import RankWarning
from warnings import simplefilter
from .cannon_instance import \
CannonInstance, \
CannonInstanceWithContinuumNormalisation, \
CannonInstanceWithRunningMeanNormalisation
from .cannon_instance_release_2018_01_09_1 import \
CannonInstance_2018_01_09
__version__ = "0.1.0"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import logging
from numpy import RankWarning
from warnings import simplefilter
#from .cannon_instance import \
# CannonInstance, \
# CannonInstanceWithContinuumNormalisation, \
# CannonInstanceWithRunningMeanNormalisation
CannonInstance = CannonInstanceWithContinuumNormalisation = CannonInstanceWithRunningMeanNormalisation = None
from .cannon_instance_release_2018_01_09_1 import \
CannonInstance_2018_01_09
__version__ = "0.1.0"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
Tweak to remove dependency on new Cannon
|
Tweak to remove dependency on new Cannon
|
Python
|
mit
|
dcf21/4most-4gp,dcf21/4most-4gp,dcf21/4most-4gp
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import logging
from numpy import RankWarning
from warnings import simplefilter
from .cannon_instance import \
CannonInstance, \
CannonInstanceWithContinuumNormalisation, \
CannonInstanceWithRunningMeanNormalisation
from .cannon_instance_release_2018_01_09_1 import \
CannonInstance_2018_01_09
__version__ = "0.1.0"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
Tweak to remove dependency on new Cannon
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import logging
from numpy import RankWarning
from warnings import simplefilter
#from .cannon_instance import \
# CannonInstance, \
# CannonInstanceWithContinuumNormalisation, \
# CannonInstanceWithRunningMeanNormalisation
CannonInstance = CannonInstanceWithContinuumNormalisation = CannonInstanceWithRunningMeanNormalisation = None
from .cannon_instance_release_2018_01_09_1 import \
CannonInstance_2018_01_09
__version__ = "0.1.0"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
<commit_before>#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import logging
from numpy import RankWarning
from warnings import simplefilter
from .cannon_instance import \
CannonInstance, \
CannonInstanceWithContinuumNormalisation, \
CannonInstanceWithRunningMeanNormalisation
from .cannon_instance_release_2018_01_09_1 import \
CannonInstance_2018_01_09
__version__ = "0.1.0"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
<commit_msg>Tweak to remove dependency on new Cannon<commit_after>
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import logging
from numpy import RankWarning
from warnings import simplefilter
#from .cannon_instance import \
# CannonInstance, \
# CannonInstanceWithContinuumNormalisation, \
# CannonInstanceWithRunningMeanNormalisation
CannonInstance = CannonInstanceWithContinuumNormalisation = CannonInstanceWithRunningMeanNormalisation = None
from .cannon_instance_release_2018_01_09_1 import \
CannonInstance_2018_01_09
__version__ = "0.1.0"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import logging
from numpy import RankWarning
from warnings import simplefilter
from .cannon_instance import \
CannonInstance, \
CannonInstanceWithContinuumNormalisation, \
CannonInstanceWithRunningMeanNormalisation
from .cannon_instance_release_2018_01_09_1 import \
CannonInstance_2018_01_09
__version__ = "0.1.0"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
Tweak to remove dependency on new Cannon#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import logging
from numpy import RankWarning
from warnings import simplefilter
#from .cannon_instance import \
# CannonInstance, \
# CannonInstanceWithContinuumNormalisation, \
# CannonInstanceWithRunningMeanNormalisation
CannonInstance = CannonInstanceWithContinuumNormalisation = CannonInstanceWithRunningMeanNormalisation = None
from .cannon_instance_release_2018_01_09_1 import \
CannonInstance_2018_01_09
__version__ = "0.1.0"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
<commit_before>#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import logging
from numpy import RankWarning
from warnings import simplefilter
from .cannon_instance import \
CannonInstance, \
CannonInstanceWithContinuumNormalisation, \
CannonInstanceWithRunningMeanNormalisation
from .cannon_instance_release_2018_01_09_1 import \
CannonInstance_2018_01_09
__version__ = "0.1.0"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
<commit_msg>Tweak to remove dependency on new Cannon<commit_after>#!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
import logging
from numpy import RankWarning
from warnings import simplefilter
#from .cannon_instance import \
# CannonInstance, \
# CannonInstanceWithContinuumNormalisation, \
# CannonInstanceWithRunningMeanNormalisation
CannonInstance = CannonInstanceWithContinuumNormalisation = CannonInstanceWithRunningMeanNormalisation = None
from .cannon_instance_release_2018_01_09_1 import \
CannonInstance_2018_01_09
__version__ = "0.1.0"
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO) # TODO: Remove this when stable.
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("%(asctime)s [%(levelname)-8s] %(message)s"))
logger.addHandler(handler)
simplefilter("ignore", RankWarning)
simplefilter("ignore", RuntimeWarning)
|
db0921e0242d478d29115179b9da2ffcd3fa35fb
|
micromanager/resources/__init__.py
|
micromanager/resources/__init__.py
|
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
    """Factory for wrapping raw resource data in the matching resource class."""

    @staticmethod
    def factory(resource_data, **kwargs):
        """Return a resource instance built from *resource_data*.

        The concrete class is selected by the 'resource_kind' key. Extra
        keyword arguments are forwarded unchanged to the resource
        constructor (backward-compatible: existing callers pass none).
        Missing or unrecognized kinds trip an assert, kept as-is so
        existing callers' error handling is unchanged.
        """
        resource_kind_map = {
            'storage#bucket': Bucket,
            'bigquery#dataset': BQDataset,
            'sql#instance': SQLInstance
        }
        kind = resource_data.get('resource_kind')
        if not kind:
            assert 0, 'Unrecognized resource'
        if kind not in resource_kind_map:
            assert 0, 'Unrecognized resource'
        cls = resource_kind_map.get(kind)
        return cls(resource_data, **kwargs)
|
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data, **kargs):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data, **kargs)
|
Allow kargs in resource factory
|
Allow kargs in resource factory
|
Python
|
apache-2.0
|
forseti-security/resource-policy-evaluation-library
|
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data)
Allow kargs in resource factory
|
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data, **kargs):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data, **kargs)
|
<commit_before>from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data)
<commit_msg>Allow kargs in resource factory<commit_after>
|
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data, **kargs):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data, **kargs)
|
from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data)
Allow kargs in resource factoryfrom .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data, **kargs):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data, **kargs)
|
<commit_before>from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data)
<commit_msg>Allow kargs in resource factory<commit_after>from .base import ResourceBase # noqa F401
from .bucket import Bucket # noqa F401
from .sql import SQLInstance # noqa F401
from .bigquery import BQDataset # noqa F401
class Resource():
@staticmethod
def factory(resource_data, **kargs):
resource_kind_map = {
'storage#bucket': Bucket,
'bigquery#dataset': BQDataset,
'sql#instance': SQLInstance
}
kind = resource_data.get('resource_kind')
if not kind:
assert 0, 'Unrecognized resource'
if kind not in resource_kind_map:
assert 0, 'Unrecognized resource'
cls = resource_kind_map.get(kind)
return cls(resource_data, **kargs)
|
6e40897f935f504bb0bf3e60fbf9d1ef54786d2e
|
smoked/test/url.py
|
smoked/test/url.py
|
# coding: utf-8
from __future__ import unicode_literals
from django.utils.six.moves.urllib.request import urlopen
def url_available(url=None, expected_code=200):
    """Check the HTTP response code of a single resource.

    Bug fix: urlopen raises HTTPError for non-2xx responses, so checking an
    error page (e.g. expected_code=404) used to raise instead of passing.
    The error's status code is now compared against *expected_code* too.
    """
    # Local import keeps this fix self-contained; same six.moves namespace
    # the module already uses for urlopen.
    from django.utils.six.moves.urllib.error import HTTPError
    try:
        assert urlopen(url).getcode() == expected_code
    except HTTPError as e:
        assert e.code == expected_code
|
# coding: utf-8
from __future__ import unicode_literals
from django.utils.six.moves.urllib.request import urlopen
from django.utils.six.moves.urllib.error import HTTPError
def url_available(url=None, expected_code=200):
""" Check availability (HTTP response code) of single resource """
try:
assert urlopen(url).getcode() == expected_code
except HTTPError as e:
assert e.code == expected_code
|
Fix for testing error pages
|
Fix for testing error pages
calling url_available(url='http://doesnotexist.com', expected_code=404) # raise exception
|
Python
|
mit
|
djentlemen/django-smoked
|
# coding: utf-8
from __future__ import unicode_literals
from django.utils.six.moves.urllib.request import urlopen
def url_available(url=None, expected_code=200):
""" Check availability (HTTP response code) of single resource """
assert urlopen(url).getcode() == expected_code
Fix for testing error pages
calling url_available(url='http://doesnotexist.com', expected_code=404) # raise exception
|
# coding: utf-8
from __future__ import unicode_literals
from django.utils.six.moves.urllib.request import urlopen
from django.utils.six.moves.urllib.error import HTTPError
def url_available(url=None, expected_code=200):
""" Check availability (HTTP response code) of single resource """
try:
assert urlopen(url).getcode() == expected_code
except HTTPError as e:
assert e.code == expected_code
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
from django.utils.six.moves.urllib.request import urlopen
def url_available(url=None, expected_code=200):
""" Check availability (HTTP response code) of single resource """
assert urlopen(url).getcode() == expected_code
<commit_msg>Fix for testing error pages
calling url_available(url='http://doesnotexist.com', expected_code=404) # raise exception<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
from django.utils.six.moves.urllib.request import urlopen
from django.utils.six.moves.urllib.error import HTTPError
def url_available(url=None, expected_code=200):
""" Check availability (HTTP response code) of single resource """
try:
assert urlopen(url).getcode() == expected_code
except HTTPError as e:
assert e.code == expected_code
|
# coding: utf-8
from __future__ import unicode_literals
from django.utils.six.moves.urllib.request import urlopen
def url_available(url=None, expected_code=200):
""" Check availability (HTTP response code) of single resource """
assert urlopen(url).getcode() == expected_code
Fix for testing error pages
calling url_available(url='http://doesnotexist.com', expected_code=404) # raise exception# coding: utf-8
from __future__ import unicode_literals
from django.utils.six.moves.urllib.request import urlopen
from django.utils.six.moves.urllib.error import HTTPError
def url_available(url=None, expected_code=200):
""" Check availability (HTTP response code) of single resource """
try:
assert urlopen(url).getcode() == expected_code
except HTTPError as e:
assert e.code == expected_code
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
from django.utils.six.moves.urllib.request import urlopen
def url_available(url=None, expected_code=200):
""" Check availability (HTTP response code) of single resource """
assert urlopen(url).getcode() == expected_code
<commit_msg>Fix for testing error pages
calling url_available(url='http://doesnotexist.com', expected_code=404) # raise exception<commit_after># coding: utf-8
from __future__ import unicode_literals
from django.utils.six.moves.urllib.request import urlopen
from django.utils.six.moves.urllib.error import HTTPError
def url_available(url=None, expected_code=200):
""" Check availability (HTTP response code) of single resource """
try:
assert urlopen(url).getcode() == expected_code
except HTTPError as e:
assert e.code == expected_code
|
29a737c8d964ee54583e1e7f86a1c2cb6d09abb6
|
speciesexplorer.py
|
speciesexplorer.py
|
import webapp2
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.write(file('index.html').read())
application = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
|
import webapp2
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.write(file('index.html').read())
class GoogleWebmasterVerifier(webapp2.RequestHandler):
def get(self):
self.response.write(file('google7e0693b4ccda33f7.html').read())
application = webapp2.WSGIApplication([
('/google7e0693b4ccda33f7.html', GoogleWebmasterVerifier),
('/', MainPage)
], debug=True)
|
Add Google Webmaster verification handler.
|
Add Google Webmaster verification handler.
|
Python
|
apache-2.0
|
harokb/SpeciesTree,harokb/SpeciesTree
|
import webapp2
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.write(file('index.html').read())
application = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
Add Google Webmaster verification handler.
|
import webapp2
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.write(file('index.html').read())
class GoogleWebmasterVerifier(webapp2.RequestHandler):
def get(self):
self.response.write(file('google7e0693b4ccda33f7.html').read())
application = webapp2.WSGIApplication([
('/google7e0693b4ccda33f7.html', GoogleWebmasterVerifier),
('/', MainPage)
], debug=True)
|
<commit_before>import webapp2
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.write(file('index.html').read())
application = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
<commit_msg>Add Google Webmaster verification handler.<commit_after>
|
import webapp2
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.write(file('index.html').read())
class GoogleWebmasterVerifier(webapp2.RequestHandler):
def get(self):
self.response.write(file('google7e0693b4ccda33f7.html').read())
application = webapp2.WSGIApplication([
('/google7e0693b4ccda33f7.html', GoogleWebmasterVerifier),
('/', MainPage)
], debug=True)
|
import webapp2
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.write(file('index.html').read())
application = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
Add Google Webmaster verification handler.import webapp2
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.write(file('index.html').read())
class GoogleWebmasterVerifier(webapp2.RequestHandler):
def get(self):
self.response.write(file('google7e0693b4ccda33f7.html').read())
application = webapp2.WSGIApplication([
('/google7e0693b4ccda33f7.html', GoogleWebmasterVerifier),
('/', MainPage)
], debug=True)
|
<commit_before>import webapp2
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.write(file('index.html').read())
application = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
<commit_msg>Add Google Webmaster verification handler.<commit_after>import webapp2
class MainPage(webapp2.RequestHandler):
def get(self):
self.response.write(file('index.html').read())
class GoogleWebmasterVerifier(webapp2.RequestHandler):
def get(self):
self.response.write(file('google7e0693b4ccda33f7.html').read())
application = webapp2.WSGIApplication([
('/google7e0693b4ccda33f7.html', GoogleWebmasterVerifier),
('/', MainPage)
], debug=True)
|
f36a46766b3111d9acb3a11f30e170629989913e
|
OnlineParticipationDataset/spiders/Bonn2011Spider.py
|
OnlineParticipationDataset/spiders/Bonn2011Spider.py
|
import scrapy
class Bonn2011Spider(scrapy.Spider):
name = "bonn2011"
start_urls = ['http://bonn-packts-an-2011.de/www.bonn-packts-an.de/dito/forumc0d2.html']
def parse(self, response):
for thread in response.css('div.vorschlag.buergervorschlag'):
yield scrapy.Request(thread,callback=self.parse_thread)
# Here: Parse next Site
next_page = response.xpath('//div[@class="list_pages"]/a[.,"vor"]/@href').extract_first()
if next_page:
yield scrapy.Request(
response.urljoin(next_page),
callback=self.parse
)
def parse_thread(self, response):
# TODO: create Item, maybe with ItemLoader
yield {
'id' : response.xpath('//h2/text()').extract_first(),
'title' : response.xpath('//div[@class="col_01"]/h3/text()').extract_first(),
}
|
import scrapy
class Bonn2011Spider(scrapy.Spider):
name = "bonn2011"
start_urls = ['http://bonn-packts-an-2011.de/www.bonn-packts-an.de/dito/forumc0d2.html']
def parse(self, response):
for thread in response.css('div.vorschlag.buergervorschlag'):
thread_url = thread.xpath('//div[@class="col_01"]/h3/a/@href').extract_first()
yield scrapy.Request(response.urljoin(thread_url),callback=self.parse_thread)
# yield {
# 'id' : thread.css('h2::text').extract_first(),
# 'title' : thread.css('div.col_01 h3 a::text').extract_first(),
# 'link' : thread.css('div.col_01 h3 a::attr(href)').extract_first()
# }
# Here: Parse next Site
next_page = response.xpath('//div[@class="list_pages"]/a[.="vor"]/@href').extract_first()
if next_page:
yield scrapy.Request(
response.urljoin(next_page),
callback=self.parse
)
def parse_thread(self, response):
# TODO: create Item, maybe with ItemLoader
yield {
'id' : response.xpath('//h2/text()').extract_first(),
'title' : response.xpath('//div[@class="col_01"]/h3/text()').extract_first(),
}
|
Correct call of second parse method
|
Correct call of second parse method
|
Python
|
mit
|
Liebeck/OnlineParticipationDatasets
|
import scrapy
class Bonn2011Spider(scrapy.Spider):
name = "bonn2011"
start_urls = ['http://bonn-packts-an-2011.de/www.bonn-packts-an.de/dito/forumc0d2.html']
def parse(self, response):
for thread in response.css('div.vorschlag.buergervorschlag'):
yield scrapy.Request(thread,callback=self.parse_thread)
# Here: Parse next Site
next_page = response.xpath('//div[@class="list_pages"]/a[.,"vor"]/@href').extract_first()
if next_page:
yield scrapy.Request(
response.urljoin(next_page),
callback=self.parse
)
def parse_thread(self, response):
# TODO: create Item, maybe with ItemLoader
yield {
'id' : response.xpath('//h2/text()').extract_first(),
'title' : response.xpath('//div[@class="col_01"]/h3/text()').extract_first(),
}
Correct call of second parse method
|
import scrapy
class Bonn2011Spider(scrapy.Spider):
name = "bonn2011"
start_urls = ['http://bonn-packts-an-2011.de/www.bonn-packts-an.de/dito/forumc0d2.html']
def parse(self, response):
for thread in response.css('div.vorschlag.buergervorschlag'):
thread_url = thread.xpath('//div[@class="col_01"]/h3/a/@href').extract_first()
yield scrapy.Request(response.urljoin(thread_url),callback=self.parse_thread)
# yield {
# 'id' : thread.css('h2::text').extract_first(),
# 'title' : thread.css('div.col_01 h3 a::text').extract_first(),
# 'link' : thread.css('div.col_01 h3 a::attr(href)').extract_first()
# }
# Here: Parse next Site
next_page = response.xpath('//div[@class="list_pages"]/a[.="vor"]/@href').extract_first()
if next_page:
yield scrapy.Request(
response.urljoin(next_page),
callback=self.parse
)
def parse_thread(self, response):
# TODO: create Item, maybe with ItemLoader
yield {
'id' : response.xpath('//h2/text()').extract_first(),
'title' : response.xpath('//div[@class="col_01"]/h3/text()').extract_first(),
}
|
<commit_before>import scrapy
class Bonn2011Spider(scrapy.Spider):
name = "bonn2011"
start_urls = ['http://bonn-packts-an-2011.de/www.bonn-packts-an.de/dito/forumc0d2.html']
def parse(self, response):
for thread in response.css('div.vorschlag.buergervorschlag'):
yield scrapy.Request(thread,callback=self.parse_thread)
# Here: Parse next Site
next_page = response.xpath('//div[@class="list_pages"]/a[.,"vor"]/@href').extract_first()
if next_page:
yield scrapy.Request(
response.urljoin(next_page),
callback=self.parse
)
def parse_thread(self, response):
# TODO: create Item, maybe with ItemLoader
yield {
'id' : response.xpath('//h2/text()').extract_first(),
'title' : response.xpath('//div[@class="col_01"]/h3/text()').extract_first(),
}
<commit_msg>Correct call of second parse method<commit_after>
|
import scrapy
class Bonn2011Spider(scrapy.Spider):
name = "bonn2011"
start_urls = ['http://bonn-packts-an-2011.de/www.bonn-packts-an.de/dito/forumc0d2.html']
def parse(self, response):
for thread in response.css('div.vorschlag.buergervorschlag'):
thread_url = thread.xpath('//div[@class="col_01"]/h3/a/@href').extract_first()
yield scrapy.Request(response.urljoin(thread_url),callback=self.parse_thread)
# yield {
# 'id' : thread.css('h2::text').extract_first(),
# 'title' : thread.css('div.col_01 h3 a::text').extract_first(),
# 'link' : thread.css('div.col_01 h3 a::attr(href)').extract_first()
# }
# Here: Parse next Site
next_page = response.xpath('//div[@class="list_pages"]/a[.="vor"]/@href').extract_first()
if next_page:
yield scrapy.Request(
response.urljoin(next_page),
callback=self.parse
)
def parse_thread(self, response):
# TODO: create Item, maybe with ItemLoader
yield {
'id' : response.xpath('//h2/text()').extract_first(),
'title' : response.xpath('//div[@class="col_01"]/h3/text()').extract_first(),
}
|
import scrapy
class Bonn2011Spider(scrapy.Spider):
name = "bonn2011"
start_urls = ['http://bonn-packts-an-2011.de/www.bonn-packts-an.de/dito/forumc0d2.html']
def parse(self, response):
for thread in response.css('div.vorschlag.buergervorschlag'):
yield scrapy.Request(thread,callback=self.parse_thread)
# Here: Parse next Site
next_page = response.xpath('//div[@class="list_pages"]/a[.,"vor"]/@href').extract_first()
if next_page:
yield scrapy.Request(
response.urljoin(next_page),
callback=self.parse
)
def parse_thread(self, response):
# TODO: create Item, maybe with ItemLoader
yield {
'id' : response.xpath('//h2/text()').extract_first(),
'title' : response.xpath('//div[@class="col_01"]/h3/text()').extract_first(),
}
Correct call of second parse methodimport scrapy
class Bonn2011Spider(scrapy.Spider):
name = "bonn2011"
start_urls = ['http://bonn-packts-an-2011.de/www.bonn-packts-an.de/dito/forumc0d2.html']
def parse(self, response):
for thread in response.css('div.vorschlag.buergervorschlag'):
thread_url = thread.xpath('//div[@class="col_01"]/h3/a/@href').extract_first()
yield scrapy.Request(response.urljoin(thread_url),callback=self.parse_thread)
# yield {
# 'id' : thread.css('h2::text').extract_first(),
# 'title' : thread.css('div.col_01 h3 a::text').extract_first(),
# 'link' : thread.css('div.col_01 h3 a::attr(href)').extract_first()
# }
# Here: Parse next Site
next_page = response.xpath('//div[@class="list_pages"]/a[.="vor"]/@href').extract_first()
if next_page:
yield scrapy.Request(
response.urljoin(next_page),
callback=self.parse
)
def parse_thread(self, response):
# TODO: create Item, maybe with ItemLoader
yield {
'id' : response.xpath('//h2/text()').extract_first(),
'title' : response.xpath('//div[@class="col_01"]/h3/text()').extract_first(),
}
|
<commit_before>import scrapy
class Bonn2011Spider(scrapy.Spider):
name = "bonn2011"
start_urls = ['http://bonn-packts-an-2011.de/www.bonn-packts-an.de/dito/forumc0d2.html']
def parse(self, response):
for thread in response.css('div.vorschlag.buergervorschlag'):
yield scrapy.Request(thread,callback=self.parse_thread)
# Here: Parse next Site
next_page = response.xpath('//div[@class="list_pages"]/a[.,"vor"]/@href').extract_first()
if next_page:
yield scrapy.Request(
response.urljoin(next_page),
callback=self.parse
)
def parse_thread(self, response):
# TODO: create Item, maybe with ItemLoader
yield {
'id' : response.xpath('//h2/text()').extract_first(),
'title' : response.xpath('//div[@class="col_01"]/h3/text()').extract_first(),
}
<commit_msg>Correct call of second parse method<commit_after>import scrapy
class Bonn2011Spider(scrapy.Spider):
name = "bonn2011"
start_urls = ['http://bonn-packts-an-2011.de/www.bonn-packts-an.de/dito/forumc0d2.html']
def parse(self, response):
for thread in response.css('div.vorschlag.buergervorschlag'):
thread_url = thread.xpath('//div[@class="col_01"]/h3/a/@href').extract_first()
yield scrapy.Request(response.urljoin(thread_url),callback=self.parse_thread)
# yield {
# 'id' : thread.css('h2::text').extract_first(),
# 'title' : thread.css('div.col_01 h3 a::text').extract_first(),
# 'link' : thread.css('div.col_01 h3 a::attr(href)').extract_first()
# }
# Here: Parse next Site
next_page = response.xpath('//div[@class="list_pages"]/a[.="vor"]/@href').extract_first()
if next_page:
yield scrapy.Request(
response.urljoin(next_page),
callback=self.parse
)
def parse_thread(self, response):
# TODO: create Item, maybe with ItemLoader
yield {
'id' : response.xpath('//h2/text()').extract_first(),
'title' : response.xpath('//div[@class="col_01"]/h3/text()').extract_first(),
}
|
120a93a867fcad7228a4befbf16a371f2210a852
|
djangoautoconf/cmd_handler_base/database_connection_maintainer.py
|
djangoautoconf/cmd_handler_base/database_connection_maintainer.py
|
import thread
import time
from django.db import close_old_connections
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 60*60
def __init__(self, db_timeout=None):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
if db_timeout is None:
self.db_timeout = self.DB_TIMEOUT_SECONDS
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
@staticmethod
def force_close_db():
print "force close db"
close_old_connections()
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
close_old_connections()
print "db connection closed"
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
|
import thread
import time
from datetime import datetime
# from django.db import close_old_connections
from django.db import connection
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 60*60
def __init__(self, db_timeout=None):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
if db_timeout is None:
self.db_timeout = self.DB_TIMEOUT_SECONDS
else:
self.db_timeout = db_timeout
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
@staticmethod
def force_close_db():
print "force close db"
DatabaseConnectionMaintainer.close_database_connections()
@staticmethod
def close_database_connections():
# close_old_connections()
connection.close()
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
DatabaseConnectionMaintainer.close_database_connections()
print "db connection closed", datetime.now()
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
|
Use connection.close to close database connections.
|
Use connection.close to close database connections.
|
Python
|
bsd-3-clause
|
weijia/djangoautoconf,weijia/djangoautoconf
|
import thread
import time
from django.db import close_old_connections
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 60*60
def __init__(self, db_timeout=None):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
if db_timeout is None:
self.db_timeout = self.DB_TIMEOUT_SECONDS
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
@staticmethod
def force_close_db():
print "force close db"
close_old_connections()
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
close_old_connections()
print "db connection closed"
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
Use connection.close to close database connections.
|
import thread
import time
from datetime import datetime
# from django.db import close_old_connections
from django.db import connection
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 60*60
def __init__(self, db_timeout=None):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
if db_timeout is None:
self.db_timeout = self.DB_TIMEOUT_SECONDS
else:
self.db_timeout = db_timeout
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
@staticmethod
def force_close_db():
print "force close db"
DatabaseConnectionMaintainer.close_database_connections()
@staticmethod
def close_database_connections():
# close_old_connections()
connection.close()
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
DatabaseConnectionMaintainer.close_database_connections()
print "db connection closed", datetime.now()
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
|
<commit_before>import thread
import time
from django.db import close_old_connections
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 60*60
def __init__(self, db_timeout=None):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
if db_timeout is None:
self.db_timeout = self.DB_TIMEOUT_SECONDS
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
@staticmethod
def force_close_db():
print "force close db"
close_old_connections()
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
close_old_connections()
print "db connection closed"
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
<commit_msg>Use connection.close to close database connections.<commit_after>
|
import thread
import time
from datetime import datetime
# from django.db import close_old_connections
from django.db import connection
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 60*60
def __init__(self, db_timeout=None):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
if db_timeout is None:
self.db_timeout = self.DB_TIMEOUT_SECONDS
else:
self.db_timeout = db_timeout
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
@staticmethod
def force_close_db():
print "force close db"
DatabaseConnectionMaintainer.close_database_connections()
@staticmethod
def close_database_connections():
# close_old_connections()
connection.close()
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
DatabaseConnectionMaintainer.close_database_connections()
print "db connection closed", datetime.now()
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
|
import thread
import time
from django.db import close_old_connections
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 60*60
def __init__(self, db_timeout=None):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
if db_timeout is None:
self.db_timeout = self.DB_TIMEOUT_SECONDS
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
@staticmethod
def force_close_db():
print "force close db"
close_old_connections()
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
close_old_connections()
print "db connection closed"
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
Use connection.close to close database connections.import thread
import time
from datetime import datetime
# from django.db import close_old_connections
from django.db import connection
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 60*60
def __init__(self, db_timeout=None):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
if db_timeout is None:
self.db_timeout = self.DB_TIMEOUT_SECONDS
else:
self.db_timeout = db_timeout
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
@staticmethod
def force_close_db():
print "force close db"
DatabaseConnectionMaintainer.close_database_connections()
@staticmethod
def close_database_connections():
# close_old_connections()
connection.close()
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
DatabaseConnectionMaintainer.close_database_connections()
print "db connection closed", datetime.now()
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
|
<commit_before>import thread
import time
from django.db import close_old_connections
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 60*60
def __init__(self, db_timeout=None):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
if db_timeout is None:
self.db_timeout = self.DB_TIMEOUT_SECONDS
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
@staticmethod
def force_close_db():
print "force close db"
close_old_connections()
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
close_old_connections()
print "db connection closed"
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
<commit_msg>Use connection.close to close database connections.<commit_after>import thread
import time
from datetime import datetime
# from django.db import close_old_connections
from django.db import connection
class DatabaseConnectionMaintainer(object):
DB_TIMEOUT_SECONDS = 60*60
def __init__(self, db_timeout=None):
self.clients = set()
# self.device_to_protocol = {}
self.is_recent_db_change_occurred = False
if db_timeout is None:
self.db_timeout = self.DB_TIMEOUT_SECONDS
else:
self.db_timeout = db_timeout
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
@staticmethod
def force_close_db():
print "force close db"
DatabaseConnectionMaintainer.close_database_connections()
@staticmethod
def close_database_connections():
# close_old_connections()
connection.close()
def close_db_connection_if_needed(self):
if not self.is_recent_db_change_occurred:
DatabaseConnectionMaintainer.close_database_connections()
print "db connection closed", datetime.now()
self.is_recent_db_change_occurred = False
self.delay_and_execute(self.db_timeout, self.close_db_connection_if_needed)
def refresh_timeout(self):
self.is_recent_db_change_occurred = True
def delay_and_execute(self, timeout, callback):
thread.start_new_thread(self.periodical_task, (timeout, callback))
# noinspection PyMethodMayBeStatic
def periodical_task(self, timeout, callback):
time.sleep(timeout)
callback()
|
23f2be0d47c5bf8de48e614be6927dcbb5df06fc
|
cmsplugin_filer_file/cms_plugins.py
|
cmsplugin_filer_file/cms_plugins.py
|
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext_lazy as _
import models
class FilerFilePlugin(CMSPluginBase):
module = 'Filer'
model = models.FilerFile
name = _("File")
render_template = "cmsplugin_filer_file/file.html"
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'object': instance,
'placeholder': placeholder
})
return context
plugin_pool.register_plugin(FilerFilePlugin)
|
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
import models
from django.conf import settings
class FilerFilePlugin(CMSPluginBase):
module = 'Filer'
model = models.FilerFile
name = _("File")
render_template = "cmsplugin_filer_file/file.html"
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder
})
return context
def icon_src(self, instance):
file_icon = instance.get_icon_url()
if file_icon: return file_icon
return settings.CMS_MEDIA_URL + u"images/plugins/file.png"
plugin_pool.register_plugin(FilerFilePlugin)
|
Revert "FilerFilePlugin.icon_src is also obsolete"
|
Revert "FilerFilePlugin.icon_src is also obsolete"
This reverts commit 6f9609a350fcac101eb28c08f8499f11d753f292.
|
Python
|
bsd-3-clause
|
pbs/cmsplugin-filer,skirsdeda/cmsplugin-filer,wlanslovenija/cmsplugin-filer,divio/cmsplugin-filer,isotoma/cmsplugin-filer,eliasp/cmsplugin-filer,brightinteractive/cmsplugin-filer,brightinteractive/cmsplugin-filer,grigoryk/cmsplugin-filer,pbs/cmsplugin-filer,skirsdeda/cmsplugin-filer,dreipol/cmsplugin-filer,dreipol/cmsplugin-filer,douwevandermeij/cmsplugin-filer,ImaginaryLandscape/cmsplugin-filer,ImaginaryLandscape/cmsplugin-filer,jrutila/cmsplugin-filer,yvess/cmsplugin-filer,isotoma/cmsplugin-filer,alsoicode/cmsplugin-filer,yakky/cmsplugin-filer,divio/cmsplugin-filer,eliasp/cmsplugin-filer,yakky/cmsplugin-filer,yvess/cmsplugin-filer,yvess/cmsplugin-filer,pbs/cmsplugin-filer,divio/cmsplugin-filer,yvess/cmsplugin-filer,divio/cmsplugin-filer,stefanfoulis/cmsplugin-filer,alsoicode/cmsplugin-filer,stefanfoulis/cmsplugin-filer,brightinteractive/cmsplugin-filer,centralniak/cmsplugin-filer,creimers/cmsplugin-filer,jrutila/cmsplugin-filer,isotoma/cmsplugin-filer,NB-Dev/cmsplugin-filer,sephii/cmsplugin-filer,douwevandermeij/cmsplugin-filer,jschneier/cmsplugin-filer,creimers/cmsplugin-filer,pbs/cmsplugin-filer,stefanfoulis/cmsplugin-filer,centralniak/cmsplugin-filer,eliasp/cmsplugin-filer,grigoryk/cmsplugin-filer,jschneier/cmsplugin-filer,alsoicode/cmsplugin-filer,jschneier/cmsplugin-filer,skirsdeda/cmsplugin-filer,wlanslovenija/cmsplugin-filer,douwevandermeij/cmsplugin-filer,creimers/cmsplugin-filer,sephii/cmsplugin-filer,nephila/cmsplugin-filer,NB-Dev/cmsplugin-filer,stefanfoulis/cmsplugin-filer,wlanslovenija/cmsplugin-filer,sephii/cmsplugin-filer,nephila/cmsplugin-filer,neoascetic/cmsplugin-filer,nephila/cmsplugin-filer
|
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext_lazy as _
import models
class FilerFilePlugin(CMSPluginBase):
module = 'Filer'
model = models.FilerFile
name = _("File")
render_template = "cmsplugin_filer_file/file.html"
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'object': instance,
'placeholder': placeholder
})
return context
plugin_pool.register_plugin(FilerFilePlugin)
Revert "FilerFilePlugin.icon_src is also obsolete"
This reverts commit 6f9609a350fcac101eb28c08f8499f11d753f292.
|
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
import models
from django.conf import settings
class FilerFilePlugin(CMSPluginBase):
module = 'Filer'
model = models.FilerFile
name = _("File")
render_template = "cmsplugin_filer_file/file.html"
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder
})
return context
def icon_src(self, instance):
file_icon = instance.get_icon_url()
if file_icon: return file_icon
return settings.CMS_MEDIA_URL + u"images/plugins/file.png"
plugin_pool.register_plugin(FilerFilePlugin)
|
<commit_before>from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext_lazy as _
import models
class FilerFilePlugin(CMSPluginBase):
module = 'Filer'
model = models.FilerFile
name = _("File")
render_template = "cmsplugin_filer_file/file.html"
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'object': instance,
'placeholder': placeholder
})
return context
plugin_pool.register_plugin(FilerFilePlugin)
<commit_msg>Revert "FilerFilePlugin.icon_src is also obsolete"
This reverts commit 6f9609a350fcac101eb28c08f8499f11d753f292.<commit_after>
|
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
import models
from django.conf import settings
class FilerFilePlugin(CMSPluginBase):
module = 'Filer'
model = models.FilerFile
name = _("File")
render_template = "cmsplugin_filer_file/file.html"
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder
})
return context
def icon_src(self, instance):
file_icon = instance.get_icon_url()
if file_icon: return file_icon
return settings.CMS_MEDIA_URL + u"images/plugins/file.png"
plugin_pool.register_plugin(FilerFilePlugin)
|
from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext_lazy as _
import models
class FilerFilePlugin(CMSPluginBase):
module = 'Filer'
model = models.FilerFile
name = _("File")
render_template = "cmsplugin_filer_file/file.html"
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'object': instance,
'placeholder': placeholder
})
return context
plugin_pool.register_plugin(FilerFilePlugin)
Revert "FilerFilePlugin.icon_src is also obsolete"
This reverts commit 6f9609a350fcac101eb28c08f8499f11d753f292.from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
import models
from django.conf import settings
class FilerFilePlugin(CMSPluginBase):
module = 'Filer'
model = models.FilerFile
name = _("File")
render_template = "cmsplugin_filer_file/file.html"
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder
})
return context
def icon_src(self, instance):
file_icon = instance.get_icon_url()
if file_icon: return file_icon
return settings.CMS_MEDIA_URL + u"images/plugins/file.png"
plugin_pool.register_plugin(FilerFilePlugin)
|
<commit_before>from cms.plugin_base import CMSPluginBase
from cms.plugin_pool import plugin_pool
from django.utils.translation import ugettext_lazy as _
import models
class FilerFilePlugin(CMSPluginBase):
module = 'Filer'
model = models.FilerFile
name = _("File")
render_template = "cmsplugin_filer_file/file.html"
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'object': instance,
'placeholder': placeholder
})
return context
plugin_pool.register_plugin(FilerFilePlugin)
<commit_msg>Revert "FilerFilePlugin.icon_src is also obsolete"
This reverts commit 6f9609a350fcac101eb28c08f8499f11d753f292.<commit_after>from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
import models
from django.conf import settings
class FilerFilePlugin(CMSPluginBase):
module = 'Filer'
model = models.FilerFile
name = _("File")
render_template = "cmsplugin_filer_file/file.html"
text_enabled = True
def render(self, context, instance, placeholder):
context.update({
'object':instance,
'placeholder':placeholder
})
return context
def icon_src(self, instance):
file_icon = instance.get_icon_url()
if file_icon: return file_icon
return settings.CMS_MEDIA_URL + u"images/plugins/file.png"
plugin_pool.register_plugin(FilerFilePlugin)
|
29c20b0a55b0f003a5a5dd83d5d0f177eca6a5c6
|
valuenetwork/valueaccounting/migrations/0013_auto_20180530_2053.py
|
valuenetwork/valueaccounting/migrations/0013_auto_20180530_2053.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-30 20:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('valueaccounting', '0012_auto_20170717_1841'),
]
operations = [
migrations.AddField(
model_name='process',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='processes', to='valueaccounting.Order', verbose_name='plan'),
),
migrations.AlterField(
model_name='economicresourcetype',
name='behavior',
field=models.CharField(choices=[(b'work', 'Type of Work'), (b'account', 'Virtual Account'), (b'dig_curr', 'Digital Currency'), (b'dig_acct', 'Digital Currency Address'), (b'dig_wallet', 'Digital Currency Wallet'), (b'consumed', 'Produced/Changed + Consumed'), (b'used', 'Produced/Changed + Used'), (b'cited', 'Produced/Changed + Cited'), (b'produced', 'Produced/Changed only'), (b'other', 'Other')], default=b'other', max_length=12, verbose_name='behavior'),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-30 20:53
# and then modified by fosterlynn to remove the migration that duplicates a previous migration, and change the dependency to that migration
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
#('valueaccounting', '0012_auto_20170717_1841')
('valueaccounting', '0013_auto_20171106_1539'),
]
operations = [
migrations.AddField(
model_name='process',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='processes', to='valueaccounting.Order', verbose_name='plan'),
),
#migrations.AlterField(
# model_name='economicresourcetype',
# name='behavior',
# field=models.CharField(choices=[(b'work', 'Type of Work'), (b'account', 'Virtual Account'), (b'dig_curr', 'Digital Currency'), (b'dig_acct', 'Digital Currency Address'), (b'dig_wallet', 'Digital Currency Wallet'), (b'consumed', 'Produced/Changed + Consumed'), (b'used', 'Produced/Changed + Used'), (b'cited', 'Produced/Changed + Cited'), (b'produced', 'Produced/Changed only'), (b'other', 'Other')], default=b'other', max_length=12, verbose_name='behavior'),
#),
]
|
Fix to migration dependency issue because of missing a migration in the api-extensions branch. Removed duplicate change and changed the dependency.
|
Fix to migration dependency issue because of missing a migration in the api-extensions branch. Removed duplicate change and changed the dependency.
|
Python
|
agpl-3.0
|
FreedomCoop/valuenetwork,FreedomCoop/valuenetwork,FreedomCoop/valuenetwork,FreedomCoop/valuenetwork
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-30 20:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('valueaccounting', '0012_auto_20170717_1841'),
]
operations = [
migrations.AddField(
model_name='process',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='processes', to='valueaccounting.Order', verbose_name='plan'),
),
migrations.AlterField(
model_name='economicresourcetype',
name='behavior',
field=models.CharField(choices=[(b'work', 'Type of Work'), (b'account', 'Virtual Account'), (b'dig_curr', 'Digital Currency'), (b'dig_acct', 'Digital Currency Address'), (b'dig_wallet', 'Digital Currency Wallet'), (b'consumed', 'Produced/Changed + Consumed'), (b'used', 'Produced/Changed + Used'), (b'cited', 'Produced/Changed + Cited'), (b'produced', 'Produced/Changed only'), (b'other', 'Other')], default=b'other', max_length=12, verbose_name='behavior'),
),
]
Fix to migration dependency issue because of missing a migration in the api-extensions branch. Removed duplicate change and changed the dependency.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-30 20:53
# and then modified by fosterlynn to remove the migration that duplicates a previous migration, and change the dependency to that migration
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
#('valueaccounting', '0012_auto_20170717_1841')
('valueaccounting', '0013_auto_20171106_1539'),
]
operations = [
migrations.AddField(
model_name='process',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='processes', to='valueaccounting.Order', verbose_name='plan'),
),
#migrations.AlterField(
# model_name='economicresourcetype',
# name='behavior',
# field=models.CharField(choices=[(b'work', 'Type of Work'), (b'account', 'Virtual Account'), (b'dig_curr', 'Digital Currency'), (b'dig_acct', 'Digital Currency Address'), (b'dig_wallet', 'Digital Currency Wallet'), (b'consumed', 'Produced/Changed + Consumed'), (b'used', 'Produced/Changed + Used'), (b'cited', 'Produced/Changed + Cited'), (b'produced', 'Produced/Changed only'), (b'other', 'Other')], default=b'other', max_length=12, verbose_name='behavior'),
#),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-30 20:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('valueaccounting', '0012_auto_20170717_1841'),
]
operations = [
migrations.AddField(
model_name='process',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='processes', to='valueaccounting.Order', verbose_name='plan'),
),
migrations.AlterField(
model_name='economicresourcetype',
name='behavior',
field=models.CharField(choices=[(b'work', 'Type of Work'), (b'account', 'Virtual Account'), (b'dig_curr', 'Digital Currency'), (b'dig_acct', 'Digital Currency Address'), (b'dig_wallet', 'Digital Currency Wallet'), (b'consumed', 'Produced/Changed + Consumed'), (b'used', 'Produced/Changed + Used'), (b'cited', 'Produced/Changed + Cited'), (b'produced', 'Produced/Changed only'), (b'other', 'Other')], default=b'other', max_length=12, verbose_name='behavior'),
),
]
<commit_msg>Fix to migration dependency issue because of missing a migration in the api-extensions branch. Removed duplicate change and changed the dependency.<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-30 20:53
# and then modified by fosterlynn to remove the migration that duplicates a previous migration, and change the dependency to that migration
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
#('valueaccounting', '0012_auto_20170717_1841')
('valueaccounting', '0013_auto_20171106_1539'),
]
operations = [
migrations.AddField(
model_name='process',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='processes', to='valueaccounting.Order', verbose_name='plan'),
),
#migrations.AlterField(
# model_name='economicresourcetype',
# name='behavior',
# field=models.CharField(choices=[(b'work', 'Type of Work'), (b'account', 'Virtual Account'), (b'dig_curr', 'Digital Currency'), (b'dig_acct', 'Digital Currency Address'), (b'dig_wallet', 'Digital Currency Wallet'), (b'consumed', 'Produced/Changed + Consumed'), (b'used', 'Produced/Changed + Used'), (b'cited', 'Produced/Changed + Cited'), (b'produced', 'Produced/Changed only'), (b'other', 'Other')], default=b'other', max_length=12, verbose_name='behavior'),
#),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-30 20:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('valueaccounting', '0012_auto_20170717_1841'),
]
operations = [
migrations.AddField(
model_name='process',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='processes', to='valueaccounting.Order', verbose_name='plan'),
),
migrations.AlterField(
model_name='economicresourcetype',
name='behavior',
field=models.CharField(choices=[(b'work', 'Type of Work'), (b'account', 'Virtual Account'), (b'dig_curr', 'Digital Currency'), (b'dig_acct', 'Digital Currency Address'), (b'dig_wallet', 'Digital Currency Wallet'), (b'consumed', 'Produced/Changed + Consumed'), (b'used', 'Produced/Changed + Used'), (b'cited', 'Produced/Changed + Cited'), (b'produced', 'Produced/Changed only'), (b'other', 'Other')], default=b'other', max_length=12, verbose_name='behavior'),
),
]
Fix to migration dependency issue because of missing a migration in the api-extensions branch. Removed duplicate change and changed the dependency.# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-30 20:53
# and then modified by fosterlynn to remove the migration that duplicates a previous migration, and change the dependency to that migration
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
#('valueaccounting', '0012_auto_20170717_1841')
('valueaccounting', '0013_auto_20171106_1539'),
]
operations = [
migrations.AddField(
model_name='process',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='processes', to='valueaccounting.Order', verbose_name='plan'),
),
#migrations.AlterField(
# model_name='economicresourcetype',
# name='behavior',
# field=models.CharField(choices=[(b'work', 'Type of Work'), (b'account', 'Virtual Account'), (b'dig_curr', 'Digital Currency'), (b'dig_acct', 'Digital Currency Address'), (b'dig_wallet', 'Digital Currency Wallet'), (b'consumed', 'Produced/Changed + Consumed'), (b'used', 'Produced/Changed + Used'), (b'cited', 'Produced/Changed + Cited'), (b'produced', 'Produced/Changed only'), (b'other', 'Other')], default=b'other', max_length=12, verbose_name='behavior'),
#),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-30 20:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('valueaccounting', '0012_auto_20170717_1841'),
]
operations = [
migrations.AddField(
model_name='process',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='processes', to='valueaccounting.Order', verbose_name='plan'),
),
migrations.AlterField(
model_name='economicresourcetype',
name='behavior',
field=models.CharField(choices=[(b'work', 'Type of Work'), (b'account', 'Virtual Account'), (b'dig_curr', 'Digital Currency'), (b'dig_acct', 'Digital Currency Address'), (b'dig_wallet', 'Digital Currency Wallet'), (b'consumed', 'Produced/Changed + Consumed'), (b'used', 'Produced/Changed + Used'), (b'cited', 'Produced/Changed + Cited'), (b'produced', 'Produced/Changed only'), (b'other', 'Other')], default=b'other', max_length=12, verbose_name='behavior'),
),
]
<commit_msg>Fix to migration dependency issue because of missing a migration in the api-extensions branch. Removed duplicate change and changed the dependency.<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-05-30 20:53
# and then modified by fosterlynn to remove the migration that duplicates a previous migration, and change the dependency to that migration
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
#('valueaccounting', '0012_auto_20170717_1841')
('valueaccounting', '0013_auto_20171106_1539'),
]
operations = [
migrations.AddField(
model_name='process',
name='plan',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='processes', to='valueaccounting.Order', verbose_name='plan'),
),
#migrations.AlterField(
# model_name='economicresourcetype',
# name='behavior',
# field=models.CharField(choices=[(b'work', 'Type of Work'), (b'account', 'Virtual Account'), (b'dig_curr', 'Digital Currency'), (b'dig_acct', 'Digital Currency Address'), (b'dig_wallet', 'Digital Currency Wallet'), (b'consumed', 'Produced/Changed + Consumed'), (b'used', 'Produced/Changed + Used'), (b'cited', 'Produced/Changed + Cited'), (b'produced', 'Produced/Changed only'), (b'other', 'Other')], default=b'other', max_length=12, verbose_name='behavior'),
#),
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.