commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
4048d697628ca372d981d888d9ffda4b202c56fa | config.py | config.py | DATADIR = "data/"
TRACKDIR = DATADIR + "tracks/"
LOCKFILE = TRACKDIR + "current_experiment.lock"
PLOTDIR = DATADIR + "plots/"
ARCHIVEDIR = DATADIR + "tracks_archive/"
DBGFRAMEDIR = DATADIR + "debug_frames/"
INIDIR = "ini/"
EXPSCRIPT = "python fishbox.py"
APPDIR = "fishweb/"
TEMPLATEDIR = APPDIR + "views/"
STATICDIR = APPDIR + "static/"
| DATADIR = "data/"
TRACKDIR = DATADIR + "tracks/"
LOCKFILE = DATADIR + "current_experiment.lock"
PLOTDIR = DATADIR + "plots/"
ARCHIVEDIR = DATADIR + "tracks_archive/"
DBGFRAMEDIR = DATADIR + "debug_frames/"
INIDIR = "ini/"
EXPSCRIPT = "python fishbox.py"
APPDIR = "fishweb/"
TEMPLATEDIR = APPDIR + "views/"
STATICDIR = APPDIR + "static/"
| Move lockfile to root datadir. | Move lockfile to root datadir.
| Python | mit | liffiton/ATLeS,liffiton/ATLeS,liffiton/ATLeS,liffiton/ATLeS | DATADIR = "data/"
TRACKDIR = DATADIR + "tracks/"
LOCKFILE = TRACKDIR + "current_experiment.lock"
PLOTDIR = DATADIR + "plots/"
ARCHIVEDIR = DATADIR + "tracks_archive/"
DBGFRAMEDIR = DATADIR + "debug_frames/"
INIDIR = "ini/"
EXPSCRIPT = "python fishbox.py"
APPDIR = "fishweb/"
TEMPLATEDIR = APPDIR + "views/"
STATICDIR = APPDIR + "static/"
Move lockfile to root datadir. | DATADIR = "data/"
TRACKDIR = DATADIR + "tracks/"
LOCKFILE = DATADIR + "current_experiment.lock"
PLOTDIR = DATADIR + "plots/"
ARCHIVEDIR = DATADIR + "tracks_archive/"
DBGFRAMEDIR = DATADIR + "debug_frames/"
INIDIR = "ini/"
EXPSCRIPT = "python fishbox.py"
APPDIR = "fishweb/"
TEMPLATEDIR = APPDIR + "views/"
STATICDIR = APPDIR + "static/"
| <commit_before>DATADIR = "data/"
TRACKDIR = DATADIR + "tracks/"
LOCKFILE = TRACKDIR + "current_experiment.lock"
PLOTDIR = DATADIR + "plots/"
ARCHIVEDIR = DATADIR + "tracks_archive/"
DBGFRAMEDIR = DATADIR + "debug_frames/"
INIDIR = "ini/"
EXPSCRIPT = "python fishbox.py"
APPDIR = "fishweb/"
TEMPLATEDIR = APPDIR + "views/"
STATICDIR = APPDIR + "static/"
<commit_msg>Move lockfile to root datadir.<commit_after> | DATADIR = "data/"
TRACKDIR = DATADIR + "tracks/"
LOCKFILE = DATADIR + "current_experiment.lock"
PLOTDIR = DATADIR + "plots/"
ARCHIVEDIR = DATADIR + "tracks_archive/"
DBGFRAMEDIR = DATADIR + "debug_frames/"
INIDIR = "ini/"
EXPSCRIPT = "python fishbox.py"
APPDIR = "fishweb/"
TEMPLATEDIR = APPDIR + "views/"
STATICDIR = APPDIR + "static/"
| DATADIR = "data/"
TRACKDIR = DATADIR + "tracks/"
LOCKFILE = TRACKDIR + "current_experiment.lock"
PLOTDIR = DATADIR + "plots/"
ARCHIVEDIR = DATADIR + "tracks_archive/"
DBGFRAMEDIR = DATADIR + "debug_frames/"
INIDIR = "ini/"
EXPSCRIPT = "python fishbox.py"
APPDIR = "fishweb/"
TEMPLATEDIR = APPDIR + "views/"
STATICDIR = APPDIR + "static/"
Move lockfile to root datadir.DATADIR = "data/"
TRACKDIR = DATADIR + "tracks/"
LOCKFILE = DATADIR + "current_experiment.lock"
PLOTDIR = DATADIR + "plots/"
ARCHIVEDIR = DATADIR + "tracks_archive/"
DBGFRAMEDIR = DATADIR + "debug_frames/"
INIDIR = "ini/"
EXPSCRIPT = "python fishbox.py"
APPDIR = "fishweb/"
TEMPLATEDIR = APPDIR + "views/"
STATICDIR = APPDIR + "static/"
| <commit_before>DATADIR = "data/"
TRACKDIR = DATADIR + "tracks/"
LOCKFILE = TRACKDIR + "current_experiment.lock"
PLOTDIR = DATADIR + "plots/"
ARCHIVEDIR = DATADIR + "tracks_archive/"
DBGFRAMEDIR = DATADIR + "debug_frames/"
INIDIR = "ini/"
EXPSCRIPT = "python fishbox.py"
APPDIR = "fishweb/"
TEMPLATEDIR = APPDIR + "views/"
STATICDIR = APPDIR + "static/"
<commit_msg>Move lockfile to root datadir.<commit_after>DATADIR = "data/"
TRACKDIR = DATADIR + "tracks/"
LOCKFILE = DATADIR + "current_experiment.lock"
PLOTDIR = DATADIR + "plots/"
ARCHIVEDIR = DATADIR + "tracks_archive/"
DBGFRAMEDIR = DATADIR + "debug_frames/"
INIDIR = "ini/"
EXPSCRIPT = "python fishbox.py"
APPDIR = "fishweb/"
TEMPLATEDIR = APPDIR + "views/"
STATICDIR = APPDIR + "static/"
|
9d73be469a006ae13cd847e17a62e7740bfd3d4e | dlstats/fetchers/__init__.py | dlstats/fetchers/__init__.py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
from . import eurostat, insee, world_bank
| #! /usr/bin/env python3
# -*- coding: utf-8 -*-
from . import eurostat, insee, world_bank, IMF
| Add IMF to the fetchers | Add IMF to the fetchers
| Python | agpl-3.0 | mmalter/dlstats,mmalter/dlstats,Widukind/dlstats,MichelJuillard/dlstats,Widukind/dlstats,MichelJuillard/dlstats,mmalter/dlstats,MichelJuillard/dlstats | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
from . import eurostat, insee, world_bank
Add IMF to the fetchers | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
from . import eurostat, insee, world_bank, IMF
| <commit_before>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from . import eurostat, insee, world_bank
<commit_msg>Add IMF to the fetchers<commit_after> | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
from . import eurostat, insee, world_bank, IMF
| #! /usr/bin/env python3
# -*- coding: utf-8 -*-
from . import eurostat, insee, world_bank
Add IMF to the fetchers#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from . import eurostat, insee, world_bank, IMF
| <commit_before>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from . import eurostat, insee, world_bank
<commit_msg>Add IMF to the fetchers<commit_after>#! /usr/bin/env python3
# -*- coding: utf-8 -*-
from . import eurostat, insee, world_bank, IMF
|
bd2636db55396cac2ff6766593d5082562d865e2 | lightning/types/decorators.py | lightning/types/decorators.py | from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType | from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
if not hasattr(self, 'session'):
self.create_session()
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType | Create session if one doesn't exist | Create session if one doesn't exist
| Python | mit | peterkshultz/lightning-python,garretstuber/lightning-python,lightning-viz/lightning-python,garretstuber/lightning-python,garretstuber/lightning-python,lightning-viz/lightning-python,peterkshultz/lightning-python,peterkshultz/lightning-python | from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgTypeCreate session if one doesn't exist | from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
if not hasattr(self, 'session'):
self.create_session()
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType | <commit_before>from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType<commit_msg>Create session if one doesn't exist<commit_after> | from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
if not hasattr(self, 'session'):
self.create_session()
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType | from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgTypeCreate session if one doesn't existfrom lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
if not hasattr(self, 'session'):
self.create_session()
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType | <commit_before>from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType<commit_msg>Create session if one doesn't exist<commit_after>from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
if not hasattr(self, 'session'):
self.create_session()
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType |
dc6c8b77ea944e97a9f49dd1f7ae2244f4cad2ca | bluebottle/test/factory_models/organizations.py | bluebottle/test/factory_models/organizations.py | import factory
from bluebottle.organizations.models import OrganizationContact, Organization, OrganizationMember
from .geo import CountryFactory
from .accounts import BlueBottleUserFactory
class OrganizationFactory(factory.DjangoModelFactory):
class Meta(object):
model = Organization
name = factory.Sequence(lambda n: 'Organization_{0}'.format(n))
slug = factory.Sequence(lambda n: 'organization_{0}'.format(n))
address_line1 = "'s Gravenhekje 1a"
address_line2 = '1011 TG'
city = 'Amsterdam'
state = 'North Holland'
country = factory.SubFactory(CountryFactory, name='Netherlands')
postal_code = '1011TG'
# Contact
phone_number = '(+31) 20 715 8980'
website = 'http://onepercentclub.com'
email = 'info@onepercentclub.com'
class OrganizationContactFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationContact
name = factory.Sequence(lambda n: 'Contact_{0}'.format(n))
phone = factory.Sequence(lambda n: '555-{0}'.format(n))
email = factory.Sequence(lambda n: '{0}@example.com'.format(n))
owner = factory.SubFactory(BlueBottleUserFactory)
organization = factory.SubFactory(OrganizationFactory)
class OrganizationMemberFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationMember
user = factory.SubFactory(BlueBottleUserFactory)
function = 'owner'
organization = factory.SubFactory(OrganizationFactory)
| import factory
from bluebottle.organizations.models import OrganizationContact, Organization
from .geo import CountryFactory
from .accounts import BlueBottleUserFactory
class OrganizationFactory(factory.DjangoModelFactory):
class Meta(object):
model = Organization
name = factory.Sequence(lambda n: 'Organization_{0}'.format(n))
slug = factory.Sequence(lambda n: 'organization_{0}'.format(n))
address_line1 = "'s Gravenhekje 1a"
address_line2 = '1011 TG'
city = 'Amsterdam'
state = 'North Holland'
country = factory.SubFactory(CountryFactory, name='Netherlands')
postal_code = '1011TG'
# Contact
phone_number = '(+31) 20 715 8980'
website = 'http://onepercentclub.com'
email = 'info@onepercentclub.com'
class OrganizationContactFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationContact
name = factory.Sequence(lambda n: 'Contact_{0}'.format(n))
phone = factory.Sequence(lambda n: '555-{0}'.format(n))
email = factory.Sequence(lambda n: '{0}@example.com'.format(n))
owner = factory.SubFactory(BlueBottleUserFactory)
organization = factory.SubFactory(OrganizationFactory)
| Remove unused org member in factory | Remove unused org member in factory
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | import factory
from bluebottle.organizations.models import OrganizationContact, Organization, OrganizationMember
from .geo import CountryFactory
from .accounts import BlueBottleUserFactory
class OrganizationFactory(factory.DjangoModelFactory):
class Meta(object):
model = Organization
name = factory.Sequence(lambda n: 'Organization_{0}'.format(n))
slug = factory.Sequence(lambda n: 'organization_{0}'.format(n))
address_line1 = "'s Gravenhekje 1a"
address_line2 = '1011 TG'
city = 'Amsterdam'
state = 'North Holland'
country = factory.SubFactory(CountryFactory, name='Netherlands')
postal_code = '1011TG'
# Contact
phone_number = '(+31) 20 715 8980'
website = 'http://onepercentclub.com'
email = 'info@onepercentclub.com'
class OrganizationContactFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationContact
name = factory.Sequence(lambda n: 'Contact_{0}'.format(n))
phone = factory.Sequence(lambda n: '555-{0}'.format(n))
email = factory.Sequence(lambda n: '{0}@example.com'.format(n))
owner = factory.SubFactory(BlueBottleUserFactory)
organization = factory.SubFactory(OrganizationFactory)
class OrganizationMemberFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationMember
user = factory.SubFactory(BlueBottleUserFactory)
function = 'owner'
organization = factory.SubFactory(OrganizationFactory)
Remove unused org member in factory | import factory
from bluebottle.organizations.models import OrganizationContact, Organization
from .geo import CountryFactory
from .accounts import BlueBottleUserFactory
class OrganizationFactory(factory.DjangoModelFactory):
class Meta(object):
model = Organization
name = factory.Sequence(lambda n: 'Organization_{0}'.format(n))
slug = factory.Sequence(lambda n: 'organization_{0}'.format(n))
address_line1 = "'s Gravenhekje 1a"
address_line2 = '1011 TG'
city = 'Amsterdam'
state = 'North Holland'
country = factory.SubFactory(CountryFactory, name='Netherlands')
postal_code = '1011TG'
# Contact
phone_number = '(+31) 20 715 8980'
website = 'http://onepercentclub.com'
email = 'info@onepercentclub.com'
class OrganizationContactFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationContact
name = factory.Sequence(lambda n: 'Contact_{0}'.format(n))
phone = factory.Sequence(lambda n: '555-{0}'.format(n))
email = factory.Sequence(lambda n: '{0}@example.com'.format(n))
owner = factory.SubFactory(BlueBottleUserFactory)
organization = factory.SubFactory(OrganizationFactory)
| <commit_before>import factory
from bluebottle.organizations.models import OrganizationContact, Organization, OrganizationMember
from .geo import CountryFactory
from .accounts import BlueBottleUserFactory
class OrganizationFactory(factory.DjangoModelFactory):
class Meta(object):
model = Organization
name = factory.Sequence(lambda n: 'Organization_{0}'.format(n))
slug = factory.Sequence(lambda n: 'organization_{0}'.format(n))
address_line1 = "'s Gravenhekje 1a"
address_line2 = '1011 TG'
city = 'Amsterdam'
state = 'North Holland'
country = factory.SubFactory(CountryFactory, name='Netherlands')
postal_code = '1011TG'
# Contact
phone_number = '(+31) 20 715 8980'
website = 'http://onepercentclub.com'
email = 'info@onepercentclub.com'
class OrganizationContactFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationContact
name = factory.Sequence(lambda n: 'Contact_{0}'.format(n))
phone = factory.Sequence(lambda n: '555-{0}'.format(n))
email = factory.Sequence(lambda n: '{0}@example.com'.format(n))
owner = factory.SubFactory(BlueBottleUserFactory)
organization = factory.SubFactory(OrganizationFactory)
class OrganizationMemberFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationMember
user = factory.SubFactory(BlueBottleUserFactory)
function = 'owner'
organization = factory.SubFactory(OrganizationFactory)
<commit_msg>Remove unused org member in factory<commit_after> | import factory
from bluebottle.organizations.models import OrganizationContact, Organization
from .geo import CountryFactory
from .accounts import BlueBottleUserFactory
class OrganizationFactory(factory.DjangoModelFactory):
class Meta(object):
model = Organization
name = factory.Sequence(lambda n: 'Organization_{0}'.format(n))
slug = factory.Sequence(lambda n: 'organization_{0}'.format(n))
address_line1 = "'s Gravenhekje 1a"
address_line2 = '1011 TG'
city = 'Amsterdam'
state = 'North Holland'
country = factory.SubFactory(CountryFactory, name='Netherlands')
postal_code = '1011TG'
# Contact
phone_number = '(+31) 20 715 8980'
website = 'http://onepercentclub.com'
email = 'info@onepercentclub.com'
class OrganizationContactFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationContact
name = factory.Sequence(lambda n: 'Contact_{0}'.format(n))
phone = factory.Sequence(lambda n: '555-{0}'.format(n))
email = factory.Sequence(lambda n: '{0}@example.com'.format(n))
owner = factory.SubFactory(BlueBottleUserFactory)
organization = factory.SubFactory(OrganizationFactory)
| import factory
from bluebottle.organizations.models import OrganizationContact, Organization, OrganizationMember
from .geo import CountryFactory
from .accounts import BlueBottleUserFactory
class OrganizationFactory(factory.DjangoModelFactory):
class Meta(object):
model = Organization
name = factory.Sequence(lambda n: 'Organization_{0}'.format(n))
slug = factory.Sequence(lambda n: 'organization_{0}'.format(n))
address_line1 = "'s Gravenhekje 1a"
address_line2 = '1011 TG'
city = 'Amsterdam'
state = 'North Holland'
country = factory.SubFactory(CountryFactory, name='Netherlands')
postal_code = '1011TG'
# Contact
phone_number = '(+31) 20 715 8980'
website = 'http://onepercentclub.com'
email = 'info@onepercentclub.com'
class OrganizationContactFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationContact
name = factory.Sequence(lambda n: 'Contact_{0}'.format(n))
phone = factory.Sequence(lambda n: '555-{0}'.format(n))
email = factory.Sequence(lambda n: '{0}@example.com'.format(n))
owner = factory.SubFactory(BlueBottleUserFactory)
organization = factory.SubFactory(OrganizationFactory)
class OrganizationMemberFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationMember
user = factory.SubFactory(BlueBottleUserFactory)
function = 'owner'
organization = factory.SubFactory(OrganizationFactory)
Remove unused org member in factoryimport factory
from bluebottle.organizations.models import OrganizationContact, Organization
from .geo import CountryFactory
from .accounts import BlueBottleUserFactory
class OrganizationFactory(factory.DjangoModelFactory):
class Meta(object):
model = Organization
name = factory.Sequence(lambda n: 'Organization_{0}'.format(n))
slug = factory.Sequence(lambda n: 'organization_{0}'.format(n))
address_line1 = "'s Gravenhekje 1a"
address_line2 = '1011 TG'
city = 'Amsterdam'
state = 'North Holland'
country = factory.SubFactory(CountryFactory, name='Netherlands')
postal_code = '1011TG'
# Contact
phone_number = '(+31) 20 715 8980'
website = 'http://onepercentclub.com'
email = 'info@onepercentclub.com'
class OrganizationContactFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationContact
name = factory.Sequence(lambda n: 'Contact_{0}'.format(n))
phone = factory.Sequence(lambda n: '555-{0}'.format(n))
email = factory.Sequence(lambda n: '{0}@example.com'.format(n))
owner = factory.SubFactory(BlueBottleUserFactory)
organization = factory.SubFactory(OrganizationFactory)
| <commit_before>import factory
from bluebottle.organizations.models import OrganizationContact, Organization, OrganizationMember
from .geo import CountryFactory
from .accounts import BlueBottleUserFactory
class OrganizationFactory(factory.DjangoModelFactory):
class Meta(object):
model = Organization
name = factory.Sequence(lambda n: 'Organization_{0}'.format(n))
slug = factory.Sequence(lambda n: 'organization_{0}'.format(n))
address_line1 = "'s Gravenhekje 1a"
address_line2 = '1011 TG'
city = 'Amsterdam'
state = 'North Holland'
country = factory.SubFactory(CountryFactory, name='Netherlands')
postal_code = '1011TG'
# Contact
phone_number = '(+31) 20 715 8980'
website = 'http://onepercentclub.com'
email = 'info@onepercentclub.com'
class OrganizationContactFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationContact
name = factory.Sequence(lambda n: 'Contact_{0}'.format(n))
phone = factory.Sequence(lambda n: '555-{0}'.format(n))
email = factory.Sequence(lambda n: '{0}@example.com'.format(n))
owner = factory.SubFactory(BlueBottleUserFactory)
organization = factory.SubFactory(OrganizationFactory)
class OrganizationMemberFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationMember
user = factory.SubFactory(BlueBottleUserFactory)
function = 'owner'
organization = factory.SubFactory(OrganizationFactory)
<commit_msg>Remove unused org member in factory<commit_after>import factory
from bluebottle.organizations.models import OrganizationContact, Organization
from .geo import CountryFactory
from .accounts import BlueBottleUserFactory
class OrganizationFactory(factory.DjangoModelFactory):
class Meta(object):
model = Organization
name = factory.Sequence(lambda n: 'Organization_{0}'.format(n))
slug = factory.Sequence(lambda n: 'organization_{0}'.format(n))
address_line1 = "'s Gravenhekje 1a"
address_line2 = '1011 TG'
city = 'Amsterdam'
state = 'North Holland'
country = factory.SubFactory(CountryFactory, name='Netherlands')
postal_code = '1011TG'
# Contact
phone_number = '(+31) 20 715 8980'
website = 'http://onepercentclub.com'
email = 'info@onepercentclub.com'
class OrganizationContactFactory(factory.DjangoModelFactory):
class Meta(object):
model = OrganizationContact
name = factory.Sequence(lambda n: 'Contact_{0}'.format(n))
phone = factory.Sequence(lambda n: '555-{0}'.format(n))
email = factory.Sequence(lambda n: '{0}@example.com'.format(n))
owner = factory.SubFactory(BlueBottleUserFactory)
organization = factory.SubFactory(OrganizationFactory)
|
651f2d04dc82ea0e9c653280f0bd4a17bedcb88b | server_app/__main__.py | server_app/__main__.py | import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG)
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
| import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG))
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
| Make logger sort by date | Make logger sort by date
| Python | bsd-3-clause | jos0003/Chat,jos0003/Chat,jos0003/Chat,jos0003/Chat,jos0003/Chat | import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG)
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
Make logger sort by date | import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG))
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
| <commit_before>import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG)
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
<commit_msg>Make logger sort by date<commit_after> | import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG))
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
| import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG)
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
Make logger sort by dateimport sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG))
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
| <commit_before>import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG)
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
<commit_msg>Make logger sort by date<commit_after>import sys
import os
import logging
import time
if not os.path.exists(os.path.expanduser("~/.chatserver")):
os.makedirs(os.path.expanduser("~/.chatserver"))
logging.basicConfig(filename=os.path.expanduser("~/.chatserver/chat-"+time.strftime("%d-%m-%Y.log"), level=logging.DEBUG))
sys.stderr.close()
sys.stdout.close()
sys.stdin.close()
from app import app, db, main, socketio
db.create_all()
app.register_blueprint(main)
port = app.config['PORT']
if len(sys.argv) == 2:
port = int(sys.argv[1])
logging.info("Chat server is now running on 0.0.0.0:%r" % port)
socketio.run(app, host="0.0.0.0", port=port)
|
ee2cb8687f6e4ecddfef797cc63c7daa40731dc8 | user_management/models/tests/factories.py | user_management/models/tests/factories.py | import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
@classmethod
def _prepare(cls, create, **kwargs):
password = kwargs.pop('password', None)
user = super(UserFactory, cls)._prepare(create=False, **kwargs)
user.set_password(password)
user.raw_password = password
if create:
user.save()
return user
| import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
password = factory.PostGenerationMethodCall('set_password', None)
| Use PostGenerationMethodCall instead of _prepare. | Use PostGenerationMethodCall instead of _prepare.
| Python | bsd-2-clause | incuna/django-user-management,incuna/django-user-management | import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
@classmethod
def _prepare(cls, create, **kwargs):
password = kwargs.pop('password', None)
user = super(UserFactory, cls)._prepare(create=False, **kwargs)
user.set_password(password)
user.raw_password = password
if create:
user.save()
return user
Use PostGenerationMethodCall instead of _prepare. | import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
password = factory.PostGenerationMethodCall('set_password', None)
| <commit_before>import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
@classmethod
def _prepare(cls, create, **kwargs):
password = kwargs.pop('password', None)
user = super(UserFactory, cls)._prepare(create=False, **kwargs)
user.set_password(password)
user.raw_password = password
if create:
user.save()
return user
<commit_msg>Use PostGenerationMethodCall instead of _prepare.<commit_after> | import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
password = factory.PostGenerationMethodCall('set_password', None)
| import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
@classmethod
def _prepare(cls, create, **kwargs):
password = kwargs.pop('password', None)
user = super(UserFactory, cls)._prepare(create=False, **kwargs)
user.set_password(password)
user.raw_password = password
if create:
user.save()
return user
Use PostGenerationMethodCall instead of _prepare.import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
password = factory.PostGenerationMethodCall('set_password', None)
| <commit_before>import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
@classmethod
def _prepare(cls, create, **kwargs):
password = kwargs.pop('password', None)
user = super(UserFactory, cls)._prepare(create=False, **kwargs)
user.set_password(password)
user.raw_password = password
if create:
user.save()
return user
<commit_msg>Use PostGenerationMethodCall instead of _prepare.<commit_after>import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
password = factory.PostGenerationMethodCall('set_password', None)
|
275257cfcf17fa1d2498e64735754cb4b8a3f2e8 | floo/sublime.py | floo/sublime.py | import sys
from collections import defaultdict
import time
timeouts = defaultdict(list)
top_timeout_id = 0
cancelled_timeouts = set()
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
global top_timeout_id
timeout_id = top_timeout_id
top_timeout_id + 1
if top_timeout_id > 100000:
top_timeout_id = 0
def timeout_func():
if timeout_id in cancelled_timeouts:
cancelled_timeouts.remove(timeout_id)
return
func(*args, **kwargs)
then = time.time() + (timeout / 1000.0)
timeouts[then].append(timeout_func)
return timeout_id
def cancel_timeout(timeout_id):
if timeout_id in timeouts:
cancelled_timeouts.add(timeout_id)
def call_timeouts():
now = time.time()
to_remove = []
for t, tos in timeouts.items():
if now >= t:
for timeout in tos:
timeout()
to_remove.append(t)
for k in to_remove:
del timeouts[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
def platform():
return sys.platform
| import sys
from collections import defaultdict
import time
timeouts = defaultdict(list)
top_timeout_id = 0
cancelled_timeouts = set()
calling_timeouts = False
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
global top_timeout_id
timeout_id = top_timeout_id
top_timeout_id + 1
if top_timeout_id > 100000:
top_timeout_id = 0
def timeout_func():
if timeout_id in cancelled_timeouts:
cancelled_timeouts.remove(timeout_id)
return
func(*args, **kwargs)
then = time.time() + (timeout / 1000.0)
timeouts[then].append(timeout_func)
return timeout_id
def cancel_timeout(timeout_id):
if timeout_id in timeouts:
cancelled_timeouts.add(timeout_id)
def call_timeouts():
global calling_timeouts
if calling_timeouts:
return
calling_timeouts = True
now = time.time()
to_remove = []
for t, tos in timeouts.items():
if now >= t:
for timeout in tos:
timeout()
to_remove.append(t)
for k in to_remove:
del timeouts[k]
calling_timeouts = False
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
def platform():
return sys.platform
| Stop exception if someone calls timeouts from a timeout. | Stop exception if someone calls timeouts from a timeout.
| Python | apache-2.0 | Floobits/floobits-emacs | import sys
from collections import defaultdict
import time
timeouts = defaultdict(list)
top_timeout_id = 0
cancelled_timeouts = set()
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
global top_timeout_id
timeout_id = top_timeout_id
top_timeout_id + 1
if top_timeout_id > 100000:
top_timeout_id = 0
def timeout_func():
if timeout_id in cancelled_timeouts:
cancelled_timeouts.remove(timeout_id)
return
func(*args, **kwargs)
then = time.time() + (timeout / 1000.0)
timeouts[then].append(timeout_func)
return timeout_id
def cancel_timeout(timeout_id):
if timeout_id in timeouts:
cancelled_timeouts.add(timeout_id)
def call_timeouts():
now = time.time()
to_remove = []
for t, tos in timeouts.items():
if now >= t:
for timeout in tos:
timeout()
to_remove.append(t)
for k in to_remove:
del timeouts[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
def platform():
return sys.platform
Stop exception if someone calls timeouts from a timeout. | import sys
from collections import defaultdict
import time
timeouts = defaultdict(list)
top_timeout_id = 0
cancelled_timeouts = set()
calling_timeouts = False
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
global top_timeout_id
timeout_id = top_timeout_id
top_timeout_id + 1
if top_timeout_id > 100000:
top_timeout_id = 0
def timeout_func():
if timeout_id in cancelled_timeouts:
cancelled_timeouts.remove(timeout_id)
return
func(*args, **kwargs)
then = time.time() + (timeout / 1000.0)
timeouts[then].append(timeout_func)
return timeout_id
def cancel_timeout(timeout_id):
if timeout_id in timeouts:
cancelled_timeouts.add(timeout_id)
def call_timeouts():
global calling_timeouts
if calling_timeouts:
return
calling_timeouts = True
now = time.time()
to_remove = []
for t, tos in timeouts.items():
if now >= t:
for timeout in tos:
timeout()
to_remove.append(t)
for k in to_remove:
del timeouts[k]
calling_timeouts = False
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
def platform():
return sys.platform
| <commit_before>import sys
from collections import defaultdict
import time
timeouts = defaultdict(list)
top_timeout_id = 0
cancelled_timeouts = set()
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
global top_timeout_id
timeout_id = top_timeout_id
top_timeout_id + 1
if top_timeout_id > 100000:
top_timeout_id = 0
def timeout_func():
if timeout_id in cancelled_timeouts:
cancelled_timeouts.remove(timeout_id)
return
func(*args, **kwargs)
then = time.time() + (timeout / 1000.0)
timeouts[then].append(timeout_func)
return timeout_id
def cancel_timeout(timeout_id):
if timeout_id in timeouts:
cancelled_timeouts.add(timeout_id)
def call_timeouts():
now = time.time()
to_remove = []
for t, tos in timeouts.items():
if now >= t:
for timeout in tos:
timeout()
to_remove.append(t)
for k in to_remove:
del timeouts[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
def platform():
return sys.platform
<commit_msg>Stop exception if someone calls timeouts from a timeout.<commit_after> | import sys
from collections import defaultdict
import time
timeouts = defaultdict(list)
top_timeout_id = 0
cancelled_timeouts = set()
calling_timeouts = False
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
global top_timeout_id
timeout_id = top_timeout_id
top_timeout_id + 1
if top_timeout_id > 100000:
top_timeout_id = 0
def timeout_func():
if timeout_id in cancelled_timeouts:
cancelled_timeouts.remove(timeout_id)
return
func(*args, **kwargs)
then = time.time() + (timeout / 1000.0)
timeouts[then].append(timeout_func)
return timeout_id
def cancel_timeout(timeout_id):
if timeout_id in timeouts:
cancelled_timeouts.add(timeout_id)
def call_timeouts():
global calling_timeouts
if calling_timeouts:
return
calling_timeouts = True
now = time.time()
to_remove = []
for t, tos in timeouts.items():
if now >= t:
for timeout in tos:
timeout()
to_remove.append(t)
for k in to_remove:
del timeouts[k]
calling_timeouts = False
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
def platform():
return sys.platform
| import sys
from collections import defaultdict
import time
timeouts = defaultdict(list)
top_timeout_id = 0
cancelled_timeouts = set()
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
global top_timeout_id
timeout_id = top_timeout_id
top_timeout_id + 1
if top_timeout_id > 100000:
top_timeout_id = 0
def timeout_func():
if timeout_id in cancelled_timeouts:
cancelled_timeouts.remove(timeout_id)
return
func(*args, **kwargs)
then = time.time() + (timeout / 1000.0)
timeouts[then].append(timeout_func)
return timeout_id
def cancel_timeout(timeout_id):
if timeout_id in timeouts:
cancelled_timeouts.add(timeout_id)
def call_timeouts():
now = time.time()
to_remove = []
for t, tos in timeouts.items():
if now >= t:
for timeout in tos:
timeout()
to_remove.append(t)
for k in to_remove:
del timeouts[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
def platform():
return sys.platform
Stop exception if someone calls timeouts from a timeout.import sys
from collections import defaultdict
import time
timeouts = defaultdict(list)
top_timeout_id = 0
cancelled_timeouts = set()
calling_timeouts = False
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
global top_timeout_id
timeout_id = top_timeout_id
top_timeout_id + 1
if top_timeout_id > 100000:
top_timeout_id = 0
def timeout_func():
if timeout_id in cancelled_timeouts:
cancelled_timeouts.remove(timeout_id)
return
func(*args, **kwargs)
then = time.time() + (timeout / 1000.0)
timeouts[then].append(timeout_func)
return timeout_id
def cancel_timeout(timeout_id):
if timeout_id in timeouts:
cancelled_timeouts.add(timeout_id)
def call_timeouts():
global calling_timeouts
if calling_timeouts:
return
calling_timeouts = True
now = time.time()
to_remove = []
for t, tos in timeouts.items():
if now >= t:
for timeout in tos:
timeout()
to_remove.append(t)
for k in to_remove:
del timeouts[k]
calling_timeouts = False
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
def platform():
return sys.platform
| <commit_before>import sys
from collections import defaultdict
import time
timeouts = defaultdict(list)
top_timeout_id = 0
cancelled_timeouts = set()
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
global top_timeout_id
timeout_id = top_timeout_id
top_timeout_id + 1
if top_timeout_id > 100000:
top_timeout_id = 0
def timeout_func():
if timeout_id in cancelled_timeouts:
cancelled_timeouts.remove(timeout_id)
return
func(*args, **kwargs)
then = time.time() + (timeout / 1000.0)
timeouts[then].append(timeout_func)
return timeout_id
def cancel_timeout(timeout_id):
if timeout_id in timeouts:
cancelled_timeouts.add(timeout_id)
def call_timeouts():
now = time.time()
to_remove = []
for t, tos in timeouts.items():
if now >= t:
for timeout in tos:
timeout()
to_remove.append(t)
for k in to_remove:
del timeouts[k]
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
def platform():
return sys.platform
<commit_msg>Stop exception if someone calls timeouts from a timeout.<commit_after>import sys
from collections import defaultdict
import time
timeouts = defaultdict(list)
top_timeout_id = 0
cancelled_timeouts = set()
calling_timeouts = False
def windows(*args, **kwargs):
return []
def set_timeout(func, timeout, *args, **kwargs):
global top_timeout_id
timeout_id = top_timeout_id
top_timeout_id + 1
if top_timeout_id > 100000:
top_timeout_id = 0
def timeout_func():
if timeout_id in cancelled_timeouts:
cancelled_timeouts.remove(timeout_id)
return
func(*args, **kwargs)
then = time.time() + (timeout / 1000.0)
timeouts[then].append(timeout_func)
return timeout_id
def cancel_timeout(timeout_id):
if timeout_id in timeouts:
cancelled_timeouts.add(timeout_id)
def call_timeouts():
global calling_timeouts
if calling_timeouts:
return
calling_timeouts = True
now = time.time()
to_remove = []
for t, tos in timeouts.items():
if now >= t:
for timeout in tos:
timeout()
to_remove.append(t)
for k in to_remove:
del timeouts[k]
calling_timeouts = False
def error_message(*args, **kwargs):
print(args, kwargs)
class Region(object):
def __init__(*args, **kwargs):
pass
def platform():
return sys.platform
|
ac508ff550dab2b0e9171be4fc6c12529ab111c5 | gateway/conf.py | gateway/conf.py | CAMERA_NETWORK_IP = '0.0.0.0'
CAMERA_NETWORK_TCP_PORT = 9090
MOBILE_NETWORK_IP = '0.0.0.0'
MOBILE_NETWORK_HTTP_PORT = 80
MOBILE_NETWORK_TCP_PORT = 13000
| CAMERA_NETWORK_IP = '0.0.0.0'
CAMERA_NETWORK_TCP_PORT = 9090
MOBILE_NETWORK_IP = '0.0.0.0'
MOBILE_NETWORK_HTTP_PORT = 13000
MOBILE_NETWORK_TCP_PORT = 13001
| Change mobile's HTTP port to 13000 and TCP port to 13001 | Change mobile's HTTP port to 13000 and TCP port to 13001 | Python | mit | walkover/auto-tracking-cctv-gateway | CAMERA_NETWORK_IP = '0.0.0.0'
CAMERA_NETWORK_TCP_PORT = 9090
MOBILE_NETWORK_IP = '0.0.0.0'
MOBILE_NETWORK_HTTP_PORT = 80
MOBILE_NETWORK_TCP_PORT = 13000
Change mobile's HTTP port to 13000 and TCP port to 13001 | CAMERA_NETWORK_IP = '0.0.0.0'
CAMERA_NETWORK_TCP_PORT = 9090
MOBILE_NETWORK_IP = '0.0.0.0'
MOBILE_NETWORK_HTTP_PORT = 13000
MOBILE_NETWORK_TCP_PORT = 13001
| <commit_before>CAMERA_NETWORK_IP = '0.0.0.0'
CAMERA_NETWORK_TCP_PORT = 9090
MOBILE_NETWORK_IP = '0.0.0.0'
MOBILE_NETWORK_HTTP_PORT = 80
MOBILE_NETWORK_TCP_PORT = 13000
<commit_msg>Change mobile's HTTP port to 13000 and TCP port to 13001<commit_after> | CAMERA_NETWORK_IP = '0.0.0.0'
CAMERA_NETWORK_TCP_PORT = 9090
MOBILE_NETWORK_IP = '0.0.0.0'
MOBILE_NETWORK_HTTP_PORT = 13000
MOBILE_NETWORK_TCP_PORT = 13001
| CAMERA_NETWORK_IP = '0.0.0.0'
CAMERA_NETWORK_TCP_PORT = 9090
MOBILE_NETWORK_IP = '0.0.0.0'
MOBILE_NETWORK_HTTP_PORT = 80
MOBILE_NETWORK_TCP_PORT = 13000
Change mobile's HTTP port to 13000 and TCP port to 13001CAMERA_NETWORK_IP = '0.0.0.0'
CAMERA_NETWORK_TCP_PORT = 9090
MOBILE_NETWORK_IP = '0.0.0.0'
MOBILE_NETWORK_HTTP_PORT = 13000
MOBILE_NETWORK_TCP_PORT = 13001
| <commit_before>CAMERA_NETWORK_IP = '0.0.0.0'
CAMERA_NETWORK_TCP_PORT = 9090
MOBILE_NETWORK_IP = '0.0.0.0'
MOBILE_NETWORK_HTTP_PORT = 80
MOBILE_NETWORK_TCP_PORT = 13000
<commit_msg>Change mobile's HTTP port to 13000 and TCP port to 13001<commit_after>CAMERA_NETWORK_IP = '0.0.0.0'
CAMERA_NETWORK_TCP_PORT = 9090
MOBILE_NETWORK_IP = '0.0.0.0'
MOBILE_NETWORK_HTTP_PORT = 13000
MOBILE_NETWORK_TCP_PORT = 13001
|
a02f2a1ba8f2cbf0cda0a6b8b794c4970bb4b4f2 | hackfmi/urls.py | hackfmi/urls.py | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
| from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),
)
| Add url for media files | Add url for media files
| Python | mit | Hackfmi/Diaphanum,Hackfmi/Diaphanum | from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
Add url for media files | from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),
)
| <commit_before>from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Add url for media files<commit_after> | from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),
)
| from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
Add url for media filesfrom django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),
)
| <commit_before>from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
<commit_msg>Add url for media files<commit_after>from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),
)
|
9eae7b3ae0701716621cad8e88d4737cad4523d8 | scripts/json-to-rsf/json2rsf.py | scripts/json-to-rsf/json2rsf.py | import sys
from datetime import datetime
import json
import hashlib
def print_rsf(item, key_field):
key = item[key_field]
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
item_str = json.dumps(item, separators=(',', ':'), sort_keys=True)
item_hash = hashlib.sha256(item_str.encode("utf-8")).hexdigest()
item_line = "add-item\t" + item_str
entry_line = "append-entry\t{0}\t{1}\tsha-256:{2}".format(
key, timestamp, item_hash)
print(item_line)
print(entry_line)
item_arr = json.load(sys.stdin)
if len(sys.argv) < 2:
sys.exit("Usage: cat foo.json | python json2rsf.py [key field name]")
if not isinstance(item_arr, list):
sys.exit("Error: input must be json array")
key = sys.argv[1]
[print_rsf(item, key) for item in item_arr]
| import sys
from datetime import datetime
import json
import hashlib
def print_rsf(item, key_field):
key = item[key_field]
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
item_str = json.dumps(item, separators=(',', ':'), sort_keys=True)
item_hash = hashlib.sha256(item_str.encode("utf-8")).hexdigest()
item_line = "add-item\t" + item_str
entry_line = "append-entry\tuser\t{0}\t{1}\tsha-256:{2}".format(
key, timestamp, item_hash)
print(item_line)
print(entry_line)
item_arr = json.load(sys.stdin)
if len(sys.argv) < 2:
sys.exit("Usage: cat foo.json | python json2rsf.py [key field name]")
if not isinstance(item_arr, list):
sys.exit("Error: input must be json array")
key = sys.argv[1]
[print_rsf(item, key) for item in item_arr]
| Update to use new RSF `append-entry` format | Update to use new RSF `append-entry` format
| Python | mit | openregister/openregister-java,openregister/openregister-java,openregister/openregister-java,openregister/openregister-java,openregister/openregister-java | import sys
from datetime import datetime
import json
import hashlib
def print_rsf(item, key_field):
key = item[key_field]
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
item_str = json.dumps(item, separators=(',', ':'), sort_keys=True)
item_hash = hashlib.sha256(item_str.encode("utf-8")).hexdigest()
item_line = "add-item\t" + item_str
entry_line = "append-entry\t{0}\t{1}\tsha-256:{2}".format(
key, timestamp, item_hash)
print(item_line)
print(entry_line)
item_arr = json.load(sys.stdin)
if len(sys.argv) < 2:
sys.exit("Usage: cat foo.json | python json2rsf.py [key field name]")
if not isinstance(item_arr, list):
sys.exit("Error: input must be json array")
key = sys.argv[1]
[print_rsf(item, key) for item in item_arr]
Update to use new RSF `append-entry` format | import sys
from datetime import datetime
import json
import hashlib
def print_rsf(item, key_field):
key = item[key_field]
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
item_str = json.dumps(item, separators=(',', ':'), sort_keys=True)
item_hash = hashlib.sha256(item_str.encode("utf-8")).hexdigest()
item_line = "add-item\t" + item_str
entry_line = "append-entry\tuser\t{0}\t{1}\tsha-256:{2}".format(
key, timestamp, item_hash)
print(item_line)
print(entry_line)
item_arr = json.load(sys.stdin)
if len(sys.argv) < 2:
sys.exit("Usage: cat foo.json | python json2rsf.py [key field name]")
if not isinstance(item_arr, list):
sys.exit("Error: input must be json array")
key = sys.argv[1]
[print_rsf(item, key) for item in item_arr]
| <commit_before>import sys
from datetime import datetime
import json
import hashlib
def print_rsf(item, key_field):
key = item[key_field]
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
item_str = json.dumps(item, separators=(',', ':'), sort_keys=True)
item_hash = hashlib.sha256(item_str.encode("utf-8")).hexdigest()
item_line = "add-item\t" + item_str
entry_line = "append-entry\t{0}\t{1}\tsha-256:{2}".format(
key, timestamp, item_hash)
print(item_line)
print(entry_line)
item_arr = json.load(sys.stdin)
if len(sys.argv) < 2:
sys.exit("Usage: cat foo.json | python json2rsf.py [key field name]")
if not isinstance(item_arr, list):
sys.exit("Error: input must be json array")
key = sys.argv[1]
[print_rsf(item, key) for item in item_arr]
<commit_msg>Update to use new RSF `append-entry` format<commit_after> | import sys
from datetime import datetime
import json
import hashlib
def print_rsf(item, key_field):
key = item[key_field]
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
item_str = json.dumps(item, separators=(',', ':'), sort_keys=True)
item_hash = hashlib.sha256(item_str.encode("utf-8")).hexdigest()
item_line = "add-item\t" + item_str
entry_line = "append-entry\tuser\t{0}\t{1}\tsha-256:{2}".format(
key, timestamp, item_hash)
print(item_line)
print(entry_line)
item_arr = json.load(sys.stdin)
if len(sys.argv) < 2:
sys.exit("Usage: cat foo.json | python json2rsf.py [key field name]")
if not isinstance(item_arr, list):
sys.exit("Error: input must be json array")
key = sys.argv[1]
[print_rsf(item, key) for item in item_arr]
| import sys
from datetime import datetime
import json
import hashlib
def print_rsf(item, key_field):
key = item[key_field]
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
item_str = json.dumps(item, separators=(',', ':'), sort_keys=True)
item_hash = hashlib.sha256(item_str.encode("utf-8")).hexdigest()
item_line = "add-item\t" + item_str
entry_line = "append-entry\t{0}\t{1}\tsha-256:{2}".format(
key, timestamp, item_hash)
print(item_line)
print(entry_line)
item_arr = json.load(sys.stdin)
if len(sys.argv) < 2:
sys.exit("Usage: cat foo.json | python json2rsf.py [key field name]")
if not isinstance(item_arr, list):
sys.exit("Error: input must be json array")
key = sys.argv[1]
[print_rsf(item, key) for item in item_arr]
Update to use new RSF `append-entry` formatimport sys
from datetime import datetime
import json
import hashlib
def print_rsf(item, key_field):
key = item[key_field]
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
item_str = json.dumps(item, separators=(',', ':'), sort_keys=True)
item_hash = hashlib.sha256(item_str.encode("utf-8")).hexdigest()
item_line = "add-item\t" + item_str
entry_line = "append-entry\tuser\t{0}\t{1}\tsha-256:{2}".format(
key, timestamp, item_hash)
print(item_line)
print(entry_line)
item_arr = json.load(sys.stdin)
if len(sys.argv) < 2:
sys.exit("Usage: cat foo.json | python json2rsf.py [key field name]")
if not isinstance(item_arr, list):
sys.exit("Error: input must be json array")
key = sys.argv[1]
[print_rsf(item, key) for item in item_arr]
| <commit_before>import sys
from datetime import datetime
import json
import hashlib
def print_rsf(item, key_field):
key = item[key_field]
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
item_str = json.dumps(item, separators=(',', ':'), sort_keys=True)
item_hash = hashlib.sha256(item_str.encode("utf-8")).hexdigest()
item_line = "add-item\t" + item_str
entry_line = "append-entry\t{0}\t{1}\tsha-256:{2}".format(
key, timestamp, item_hash)
print(item_line)
print(entry_line)
item_arr = json.load(sys.stdin)
if len(sys.argv) < 2:
sys.exit("Usage: cat foo.json | python json2rsf.py [key field name]")
if not isinstance(item_arr, list):
sys.exit("Error: input must be json array")
key = sys.argv[1]
[print_rsf(item, key) for item in item_arr]
<commit_msg>Update to use new RSF `append-entry` format<commit_after>import sys
from datetime import datetime
import json
import hashlib
def print_rsf(item, key_field):
key = item[key_field]
timestamp = datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
item_str = json.dumps(item, separators=(',', ':'), sort_keys=True)
item_hash = hashlib.sha256(item_str.encode("utf-8")).hexdigest()
item_line = "add-item\t" + item_str
entry_line = "append-entry\tuser\t{0}\t{1}\tsha-256:{2}".format(
key, timestamp, item_hash)
print(item_line)
print(entry_line)
item_arr = json.load(sys.stdin)
if len(sys.argv) < 2:
sys.exit("Usage: cat foo.json | python json2rsf.py [key field name]")
if not isinstance(item_arr, list):
sys.exit("Error: input must be json array")
key = sys.argv[1]
[print_rsf(item, key) for item in item_arr]
|
4095b95930a57e78e35592dba413a776959adcde | logistic_order/model/sale_order.py | logistic_order/model/sale_order.py | # -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class sale_order(orm.Model):
_inherit = 'sale.order'
_columns = {
# override only to change the 'string' argument
# from 'Customer' to 'Requesting Entity'
'partner_id': fields.many2one(
'res.partner',
'Requesting Entity',
readonly=True,
states={'draft': [('readonly', False)],
'sent': [('readonly', False)]},
required=True,
change_default=True,
select=True,
track_visibility='always'),
'consignee_id': fields.many2one(
'res.partner',
string='Consignee',
required=True),
'incoterm_address': fields.char(
'Incoterm Place',
help="Incoterm Place of Delivery. "
"International Commercial Terms are a series of "
"predefined commercial terms used in "
"international transactions."),
'requested_by': fields.text('Requested By'),
}
| # -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class sale_order(orm.Model):
_inherit = 'sale.order'
_columns = {
# override only to change the 'string' argument
# from 'Customer' to 'Requesting Entity'
'partner_id': fields.many2one(
'res.partner',
'Requesting Entity',
readonly=True,
states={'draft': [('readonly', False)],
'sent': [('readonly', False)]},
required=True,
change_default=True,
select=True,
track_visibility='always'),
'consignee_id': fields.many2one(
'res.partner',
string='Consignee',
required=True),
'incoterm_address': fields.char(
'Incoterm Place',
help="Incoterm Place of Delivery. "
"International Commercial Terms are a series of "
"predefined commercial terms used in "
"international transactions."),
'requested_by': fields.text('Requested By'),
'state': fields.selection([
('draft', 'Draft Cost Estimate'),
('sent', 'Cost Estimate Sent'),
('cancel', 'Cancelled'),
('waiting_date', 'Waiting Schedule'),
('progress', 'Logistic Order'),
('manual', 'Logistic Order to Invoice'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
], 'Status', readonly=True, track_visibility='onchange',
help="Gives the status of the cost estimate or logistic order. \nThe exception status is automatically set when a cancel operation occurs in the processing of a document linked to the logistic order. \nThe 'Waiting Schedule' status is set when the invoice is confirmed but waiting for the scheduler to run on the order date.", select=True),
}
| Rename state of SO according to LO and Cost Estimate | [IMP] Rename state of SO according to LO and Cost Estimate
| Python | agpl-3.0 | yvaucher/vertical-ngo,mdietrichc2c/vertical-ngo,jorsea/vertical-ngo,gurneyalex/vertical-ngo,jorsea/vertical-ngo,jgrandguillaume/vertical-ngo | # -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class sale_order(orm.Model):
_inherit = 'sale.order'
_columns = {
# override only to change the 'string' argument
# from 'Customer' to 'Requesting Entity'
'partner_id': fields.many2one(
'res.partner',
'Requesting Entity',
readonly=True,
states={'draft': [('readonly', False)],
'sent': [('readonly', False)]},
required=True,
change_default=True,
select=True,
track_visibility='always'),
'consignee_id': fields.many2one(
'res.partner',
string='Consignee',
required=True),
'incoterm_address': fields.char(
'Incoterm Place',
help="Incoterm Place of Delivery. "
"International Commercial Terms are a series of "
"predefined commercial terms used in "
"international transactions."),
'requested_by': fields.text('Requested By'),
}
[IMP] Rename state of SO according to LO and Cost Estimate | # -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class sale_order(orm.Model):
_inherit = 'sale.order'
_columns = {
# override only to change the 'string' argument
# from 'Customer' to 'Requesting Entity'
'partner_id': fields.many2one(
'res.partner',
'Requesting Entity',
readonly=True,
states={'draft': [('readonly', False)],
'sent': [('readonly', False)]},
required=True,
change_default=True,
select=True,
track_visibility='always'),
'consignee_id': fields.many2one(
'res.partner',
string='Consignee',
required=True),
'incoterm_address': fields.char(
'Incoterm Place',
help="Incoterm Place of Delivery. "
"International Commercial Terms are a series of "
"predefined commercial terms used in "
"international transactions."),
'requested_by': fields.text('Requested By'),
'state': fields.selection([
('draft', 'Draft Cost Estimate'),
('sent', 'Cost Estimate Sent'),
('cancel', 'Cancelled'),
('waiting_date', 'Waiting Schedule'),
('progress', 'Logistic Order'),
('manual', 'Logistic Order to Invoice'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
], 'Status', readonly=True, track_visibility='onchange',
help="Gives the status of the cost estimate or logistic order. \nThe exception status is automatically set when a cancel operation occurs in the processing of a document linked to the logistic order. \nThe 'Waiting Schedule' status is set when the invoice is confirmed but waiting for the scheduler to run on the order date.", select=True),
}
| <commit_before># -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class sale_order(orm.Model):
_inherit = 'sale.order'
_columns = {
# override only to change the 'string' argument
# from 'Customer' to 'Requesting Entity'
'partner_id': fields.many2one(
'res.partner',
'Requesting Entity',
readonly=True,
states={'draft': [('readonly', False)],
'sent': [('readonly', False)]},
required=True,
change_default=True,
select=True,
track_visibility='always'),
'consignee_id': fields.many2one(
'res.partner',
string='Consignee',
required=True),
'incoterm_address': fields.char(
'Incoterm Place',
help="Incoterm Place of Delivery. "
"International Commercial Terms are a series of "
"predefined commercial terms used in "
"international transactions."),
'requested_by': fields.text('Requested By'),
}
<commit_msg>[IMP] Rename state of SO according to LO and Cost Estimate<commit_after> | # -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class sale_order(orm.Model):
_inherit = 'sale.order'
_columns = {
# override only to change the 'string' argument
# from 'Customer' to 'Requesting Entity'
'partner_id': fields.many2one(
'res.partner',
'Requesting Entity',
readonly=True,
states={'draft': [('readonly', False)],
'sent': [('readonly', False)]},
required=True,
change_default=True,
select=True,
track_visibility='always'),
'consignee_id': fields.many2one(
'res.partner',
string='Consignee',
required=True),
'incoterm_address': fields.char(
'Incoterm Place',
help="Incoterm Place of Delivery. "
"International Commercial Terms are a series of "
"predefined commercial terms used in "
"international transactions."),
'requested_by': fields.text('Requested By'),
'state': fields.selection([
('draft', 'Draft Cost Estimate'),
('sent', 'Cost Estimate Sent'),
('cancel', 'Cancelled'),
('waiting_date', 'Waiting Schedule'),
('progress', 'Logistic Order'),
('manual', 'Logistic Order to Invoice'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
], 'Status', readonly=True, track_visibility='onchange',
help="Gives the status of the cost estimate or logistic order. \nThe exception status is automatically set when a cancel operation occurs in the processing of a document linked to the logistic order. \nThe 'Waiting Schedule' status is set when the invoice is confirmed but waiting for the scheduler to run on the order date.", select=True),
}
| # -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class sale_order(orm.Model):
_inherit = 'sale.order'
_columns = {
# override only to change the 'string' argument
# from 'Customer' to 'Requesting Entity'
'partner_id': fields.many2one(
'res.partner',
'Requesting Entity',
readonly=True,
states={'draft': [('readonly', False)],
'sent': [('readonly', False)]},
required=True,
change_default=True,
select=True,
track_visibility='always'),
'consignee_id': fields.many2one(
'res.partner',
string='Consignee',
required=True),
'incoterm_address': fields.char(
'Incoterm Place',
help="Incoterm Place of Delivery. "
"International Commercial Terms are a series of "
"predefined commercial terms used in "
"international transactions."),
'requested_by': fields.text('Requested By'),
}
[IMP] Rename state of SO according to LO and Cost Estimate# -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class sale_order(orm.Model):
_inherit = 'sale.order'
_columns = {
# override only to change the 'string' argument
# from 'Customer' to 'Requesting Entity'
'partner_id': fields.many2one(
'res.partner',
'Requesting Entity',
readonly=True,
states={'draft': [('readonly', False)],
'sent': [('readonly', False)]},
required=True,
change_default=True,
select=True,
track_visibility='always'),
'consignee_id': fields.many2one(
'res.partner',
string='Consignee',
required=True),
'incoterm_address': fields.char(
'Incoterm Place',
help="Incoterm Place of Delivery. "
"International Commercial Terms are a series of "
"predefined commercial terms used in "
"international transactions."),
'requested_by': fields.text('Requested By'),
'state': fields.selection([
('draft', 'Draft Cost Estimate'),
('sent', 'Cost Estimate Sent'),
('cancel', 'Cancelled'),
('waiting_date', 'Waiting Schedule'),
('progress', 'Logistic Order'),
('manual', 'Logistic Order to Invoice'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
], 'Status', readonly=True, track_visibility='onchange',
help="Gives the status of the cost estimate or logistic order. \nThe exception status is automatically set when a cancel operation occurs in the processing of a document linked to the logistic order. \nThe 'Waiting Schedule' status is set when the invoice is confirmed but waiting for the scheduler to run on the order date.", select=True),
}
| <commit_before># -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class sale_order(orm.Model):
_inherit = 'sale.order'
_columns = {
# override only to change the 'string' argument
# from 'Customer' to 'Requesting Entity'
'partner_id': fields.many2one(
'res.partner',
'Requesting Entity',
readonly=True,
states={'draft': [('readonly', False)],
'sent': [('readonly', False)]},
required=True,
change_default=True,
select=True,
track_visibility='always'),
'consignee_id': fields.many2one(
'res.partner',
string='Consignee',
required=True),
'incoterm_address': fields.char(
'Incoterm Place',
help="Incoterm Place of Delivery. "
"International Commercial Terms are a series of "
"predefined commercial terms used in "
"international transactions."),
'requested_by': fields.text('Requested By'),
}
<commit_msg>[IMP] Rename state of SO according to LO and Cost Estimate<commit_after># -*- coding: utf-8 -*-
from openerp.osv import orm, fields
class sale_order(orm.Model):
_inherit = 'sale.order'
_columns = {
# override only to change the 'string' argument
# from 'Customer' to 'Requesting Entity'
'partner_id': fields.many2one(
'res.partner',
'Requesting Entity',
readonly=True,
states={'draft': [('readonly', False)],
'sent': [('readonly', False)]},
required=True,
change_default=True,
select=True,
track_visibility='always'),
'consignee_id': fields.many2one(
'res.partner',
string='Consignee',
required=True),
'incoterm_address': fields.char(
'Incoterm Place',
help="Incoterm Place of Delivery. "
"International Commercial Terms are a series of "
"predefined commercial terms used in "
"international transactions."),
'requested_by': fields.text('Requested By'),
'state': fields.selection([
('draft', 'Draft Cost Estimate'),
('sent', 'Cost Estimate Sent'),
('cancel', 'Cancelled'),
('waiting_date', 'Waiting Schedule'),
('progress', 'Logistic Order'),
('manual', 'Logistic Order to Invoice'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
], 'Status', readonly=True, track_visibility='onchange',
help="Gives the status of the cost estimate or logistic order. \nThe exception status is automatically set when a cancel operation occurs in the processing of a document linked to the logistic order. \nThe 'Waiting Schedule' status is set when the invoice is confirmed but waiting for the scheduler to run on the order date.", select=True),
}
|
be8b6e9b3cd81a22d85046c769e0d267b41004e3 | MoMMI/types.py | MoMMI/types.py | class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
| from typing import Union
class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
MIdentifier = Union[SnowflakeID, str]
| Add string and snowflake identifier union. | Add string and snowflake identifier union.
| Python | mit | PJB3005/MoMMI,PJB3005/MoMMI,PJB3005/MoMMI | class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
Add string and snowflake identifier union. | from typing import Union
class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
MIdentifier = Union[SnowflakeID, str]
| <commit_before>class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
<commit_msg>Add string and snowflake identifier union.<commit_after> | from typing import Union
class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
MIdentifier = Union[SnowflakeID, str]
| class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
Add string and snowflake identifier union.from typing import Union
class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
MIdentifier = Union[SnowflakeID, str]
| <commit_before>class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
<commit_msg>Add string and snowflake identifier union.<commit_after>from typing import Union
class SnowflakeID(int):
"""
Represents a Discord Snowflake ID.
"""
pass
MIdentifier = Union[SnowflakeID, str]
|
e10af1954feb6834da02ab5e641f0fe7a1785b0e | soapbox/templatetags/soapbox.py | soapbox/templatetags/soapbox.py | from django import template
from soapbox.models import Message
register = template.Library()
class MessagesForPageNode(template.Node):
def __init__(self, url, varname):
self.url = template.Variable(url)
self.varname = varname
def render(self, context):
try:
url = self.url.resolve(context)
except template.VariableDoesNotExist:
return ''
context[self.varname] = Message.objects.match(url)
return ''
def get_messages_for_page(parser, token):
bits = token.split_contents()
if len(bits) != 4 or bits[2] != 'as':
raise template.TemplateSyntaxError("%s syntax must be '{% %s [url] as [varname] %}" % (bits[0], bits[0]))
return MessagesForPageNode(bits[1], bits[3])
| from django import template
from ..models import Message
register = template.Library()
class MessagesForPageNode(template.Node):
def __init__(self, url, varname):
self.url = template.Variable(url)
self.varname = varname
def render(self, context):
try:
url = self.url.resolve(context)
except template.VariableDoesNotExist:
return ''
context[self.varname] = Message.objects.match(url)
return ''
@register.tag
def get_messages_for_page(parser, token):
bits = token.split_contents()
if len(bits) != 4 or bits[2] != 'as':
raise template.TemplateSyntaxError("%s syntax must be '%s [url] as [varname]" % (bits[0], bits[0]))
return MessagesForPageNode(bits[1], bits[3])
| Clean up the template tags module a bit. | Clean up the template tags module a bit.
| Python | bsd-3-clause | ubernostrum/django-soapbox,ubernostrum/django-soapbox | from django import template
from soapbox.models import Message
register = template.Library()
class MessagesForPageNode(template.Node):
def __init__(self, url, varname):
self.url = template.Variable(url)
self.varname = varname
def render(self, context):
try:
url = self.url.resolve(context)
except template.VariableDoesNotExist:
return ''
context[self.varname] = Message.objects.match(url)
return ''
def get_messages_for_page(parser, token):
bits = token.split_contents()
if len(bits) != 4 or bits[2] != 'as':
raise template.TemplateSyntaxError("%s syntax must be '{% %s [url] as [varname] %}" % (bits[0], bits[0]))
return MessagesForPageNode(bits[1], bits[3])
Clean up the template tags module a bit. | from django import template
from ..models import Message
register = template.Library()
class MessagesForPageNode(template.Node):
def __init__(self, url, varname):
self.url = template.Variable(url)
self.varname = varname
def render(self, context):
try:
url = self.url.resolve(context)
except template.VariableDoesNotExist:
return ''
context[self.varname] = Message.objects.match(url)
return ''
@register.tag
def get_messages_for_page(parser, token):
bits = token.split_contents()
if len(bits) != 4 or bits[2] != 'as':
raise template.TemplateSyntaxError("%s syntax must be '%s [url] as [varname]" % (bits[0], bits[0]))
return MessagesForPageNode(bits[1], bits[3])
| <commit_before>from django import template
from soapbox.models import Message
register = template.Library()
class MessagesForPageNode(template.Node):
def __init__(self, url, varname):
self.url = template.Variable(url)
self.varname = varname
def render(self, context):
try:
url = self.url.resolve(context)
except template.VariableDoesNotExist:
return ''
context[self.varname] = Message.objects.match(url)
return ''
def get_messages_for_page(parser, token):
bits = token.split_contents()
if len(bits) != 4 or bits[2] != 'as':
raise template.TemplateSyntaxError("%s syntax must be '{% %s [url] as [varname] %}" % (bits[0], bits[0]))
return MessagesForPageNode(bits[1], bits[3])
<commit_msg>Clean up the template tags module a bit.<commit_after> | from django import template
from ..models import Message
register = template.Library()
class MessagesForPageNode(template.Node):
def __init__(self, url, varname):
self.url = template.Variable(url)
self.varname = varname
def render(self, context):
try:
url = self.url.resolve(context)
except template.VariableDoesNotExist:
return ''
context[self.varname] = Message.objects.match(url)
return ''
@register.tag
def get_messages_for_page(parser, token):
bits = token.split_contents()
if len(bits) != 4 or bits[2] != 'as':
raise template.TemplateSyntaxError("%s syntax must be '%s [url] as [varname]" % (bits[0], bits[0]))
return MessagesForPageNode(bits[1], bits[3])
| from django import template
from soapbox.models import Message
register = template.Library()
class MessagesForPageNode(template.Node):
def __init__(self, url, varname):
self.url = template.Variable(url)
self.varname = varname
def render(self, context):
try:
url = self.url.resolve(context)
except template.VariableDoesNotExist:
return ''
context[self.varname] = Message.objects.match(url)
return ''
def get_messages_for_page(parser, token):
bits = token.split_contents()
if len(bits) != 4 or bits[2] != 'as':
raise template.TemplateSyntaxError("%s syntax must be '{% %s [url] as [varname] %}" % (bits[0], bits[0]))
return MessagesForPageNode(bits[1], bits[3])
Clean up the template tags module a bit.from django import template
from ..models import Message
register = template.Library()
class MessagesForPageNode(template.Node):
def __init__(self, url, varname):
self.url = template.Variable(url)
self.varname = varname
def render(self, context):
try:
url = self.url.resolve(context)
except template.VariableDoesNotExist:
return ''
context[self.varname] = Message.objects.match(url)
return ''
@register.tag
def get_messages_for_page(parser, token):
bits = token.split_contents()
if len(bits) != 4 or bits[2] != 'as':
raise template.TemplateSyntaxError("%s syntax must be '%s [url] as [varname]" % (bits[0], bits[0]))
return MessagesForPageNode(bits[1], bits[3])
| <commit_before>from django import template
from soapbox.models import Message
register = template.Library()
class MessagesForPageNode(template.Node):
def __init__(self, url, varname):
self.url = template.Variable(url)
self.varname = varname
def render(self, context):
try:
url = self.url.resolve(context)
except template.VariableDoesNotExist:
return ''
context[self.varname] = Message.objects.match(url)
return ''
def get_messages_for_page(parser, token):
bits = token.split_contents()
if len(bits) != 4 or bits[2] != 'as':
raise template.TemplateSyntaxError("%s syntax must be '{% %s [url] as [varname] %}" % (bits[0], bits[0]))
return MessagesForPageNode(bits[1], bits[3])
<commit_msg>Clean up the template tags module a bit.<commit_after>from django import template
from ..models import Message
register = template.Library()
class MessagesForPageNode(template.Node):
def __init__(self, url, varname):
self.url = template.Variable(url)
self.varname = varname
def render(self, context):
try:
url = self.url.resolve(context)
except template.VariableDoesNotExist:
return ''
context[self.varname] = Message.objects.match(url)
return ''
@register.tag
def get_messages_for_page(parser, token):
bits = token.split_contents()
if len(bits) != 4 or bits[2] != 'as':
raise template.TemplateSyntaxError("%s syntax must be '%s [url] as [varname]" % (bits[0], bits[0]))
return MessagesForPageNode(bits[1], bits[3])
|
5c43036e44e94d55c86567d4e98689acde0510e5 | app/py/cuda_sort/sort_sep.py | app/py/cuda_sort/sort_sep.py | from cudatext import *
def _sort(s, sep_k, sep_v):
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix:', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)!=1 or len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
| from cudatext import *
def _sort(s, sep_k, sep_v):
if sep_k:
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
else:
vals = sorted(s.split(sep_v))
return sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix (optional):', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)>1:
msg_status('Separators must have length=1')
continue
if len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
| Sort new cmd: allow empty separator | Sort new cmd: allow empty separator
| Python | mpl-2.0 | Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText,Alexey-T/CudaText | from cudatext import *
def _sort(s, sep_k, sep_v):
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix:', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)!=1 or len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
Sort new cmd: allow empty separator | from cudatext import *
def _sort(s, sep_k, sep_v):
if sep_k:
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
else:
vals = sorted(s.split(sep_v))
return sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix (optional):', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)>1:
msg_status('Separators must have length=1')
continue
if len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
| <commit_before>from cudatext import *
def _sort(s, sep_k, sep_v):
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix:', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)!=1 or len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
<commit_msg>Sort new cmd: allow empty separator<commit_after> | from cudatext import *
def _sort(s, sep_k, sep_v):
if sep_k:
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
else:
vals = sorted(s.split(sep_v))
return sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix (optional):', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)>1:
msg_status('Separators must have length=1')
continue
if len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
| from cudatext import *
def _sort(s, sep_k, sep_v):
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix:', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)!=1 or len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
Sort new cmd: allow empty separatorfrom cudatext import *
def _sort(s, sep_k, sep_v):
if sep_k:
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
else:
vals = sorted(s.split(sep_v))
return sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix (optional):', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)>1:
msg_status('Separators must have length=1')
continue
if len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
| <commit_before>from cudatext import *
def _sort(s, sep_k, sep_v):
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix:', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)!=1 or len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
<commit_msg>Sort new cmd: allow empty separator<commit_after>from cudatext import *
def _sort(s, sep_k, sep_v):
if sep_k:
if not sep_k in s:
return s
key, val = s.split(sep_k, 1)
vals = sorted(val.split(sep_v))
return key+sep_k+sep_v.join(vals)
else:
vals = sorted(s.split(sep_v))
return sep_v.join(vals)
def do_sort_sep_values():
while 1:
res = dlg_input_ex(2,
'Sort: separator chars',
'Separator of prefix, to skip prefix (optional):', '=',
'Separator of values after prefix:', ',')
if res is None:
return
sep_k, sep_v = res
if len(sep_k)>1:
msg_status('Separators must have length=1')
continue
if len(sep_v)!=1:
msg_status('Separators must have length=1')
continue
if sep_k==sep_v:
msg_status('Separators cannot be the same')
continue
break
cnt = 0
for i in range(ed.get_line_count()):
s = ed.get_text_line(i)
s2 = _sort(s, sep_k, sep_v)
if s!=s2:
ed.set_text_line(i, s2)
cnt += 1
if cnt>0:
msg_status('Sorted, changed %d line(s)'%cnt)
else:
msg_status('Lines are already sorted')
|
5e80122c20e04b9208f6bc4ce13bc7ab7f757ff8 | web/impact/impact/v1/helpers/matching_criterion_helper.py | web/impact/impact/v1/helpers/matching_criterion_helper.py | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
| # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
self._check_cache(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
self._check_cache(apps)
return self._target_counts
def _check_cache(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
| Remove dead code and minor refactor | [AC-5625] Remove dead code and minor refactor
| Python | mit | masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
[AC-5625] Remove dead code and minor refactor | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
self._check_cache(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
self._check_cache(apps)
return self._target_counts
def _check_cache(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
| <commit_before># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
<commit_msg>[AC-5625] Remove dead code and minor refactor<commit_after> | # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
self._check_cache(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
self._check_cache(apps)
return self._target_counts
def _check_cache(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
| # MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
[AC-5625] Remove dead code and minor refactor# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
self._check_cache(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
self._check_cache(apps)
return self._target_counts
def _check_cache(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
| <commit_before># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def refine_feedbacks(self, feedbacks, target, refinement):
if not target:
return None
query = Q(**{refinement: target})
return feedbacks.filter(query)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
return self._target_counts
<commit_msg>[AC-5625] Remove dead code and minor refactor<commit_after># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.db.models import Q
from impact.v1.helpers.criterion_helper import CriterionHelper
class MatchingCriterionHelper(CriterionHelper):
def __init__(self, subject):
super().__init__(subject)
self._app_ids_to_targets = {}
self._target_counts = {}
def app_count(self, apps, option_name):
return self.target_counts(apps).get(option_name, 0)
def find_app_ids(self, feedbacks, apps, target):
if not feedbacks:
return []
result = []
app_map = self.app_ids_to_targets(apps)
return [app_id for app_id in
feedbacks.values_list("application_id", flat=True)
if app_id in app_map and app_map[app_id] == target.id]
def app_ids_to_targets(self, apps):
self._check_cache(apps)
return self._app_ids_to_targets
def target_counts(self, apps):
self._check_cache(apps)
return self._target_counts
def _check_cache(self, apps):
if not self._app_ids_to_targets:
self.calc_app_ids_to_targets(apps)
|
ba8939167379633b5b572cd7b70c477f101b95dd | application.py | application.py | #!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
application = create_app(os.getenv('FLASH_CONFIG') or 'default')
manager = Manager(application)
manager.add_command("runserver", Server(port=5003))
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
application = create_app(
os.getenv('DM_SUPPLIER_FRONTEND_ENVIRONMENT') or 'default'
)
manager = Manager(application)
manager.add_command("runserver", Server(port=5003))
if __name__ == '__main__':
manager.run()
| Rename FLASH_CONFIG to match common convention | Rename FLASH_CONFIG to match common convention
| Python | mit | mtekel/digitalmarketplace-supplier-frontend,mtekel/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,mtekel/digitalmarketplace-supplier-frontend,mtekel/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend | #!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
application = create_app(os.getenv('FLASH_CONFIG') or 'default')
manager = Manager(application)
manager.add_command("runserver", Server(port=5003))
if __name__ == '__main__':
manager.run()
Rename FLASH_CONFIG to match common convention | #!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
application = create_app(
os.getenv('DM_SUPPLIER_FRONTEND_ENVIRONMENT') or 'default'
)
manager = Manager(application)
manager.add_command("runserver", Server(port=5003))
if __name__ == '__main__':
manager.run()
| <commit_before>#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
application = create_app(os.getenv('FLASH_CONFIG') or 'default')
manager = Manager(application)
manager.add_command("runserver", Server(port=5003))
if __name__ == '__main__':
manager.run()
<commit_msg>Rename FLASH_CONFIG to match common convention<commit_after> | #!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
application = create_app(
os.getenv('DM_SUPPLIER_FRONTEND_ENVIRONMENT') or 'default'
)
manager = Manager(application)
manager.add_command("runserver", Server(port=5003))
if __name__ == '__main__':
manager.run()
| #!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
application = create_app(os.getenv('FLASH_CONFIG') or 'default')
manager = Manager(application)
manager.add_command("runserver", Server(port=5003))
if __name__ == '__main__':
manager.run()
Rename FLASH_CONFIG to match common convention#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
application = create_app(
os.getenv('DM_SUPPLIER_FRONTEND_ENVIRONMENT') or 'default'
)
manager = Manager(application)
manager.add_command("runserver", Server(port=5003))
if __name__ == '__main__':
manager.run()
| <commit_before>#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
application = create_app(os.getenv('FLASH_CONFIG') or 'default')
manager = Manager(application)
manager.add_command("runserver", Server(port=5003))
if __name__ == '__main__':
manager.run()
<commit_msg>Rename FLASH_CONFIG to match common convention<commit_after>#!/usr/bin/env python
import os
from app import create_app
from flask.ext.script import Manager, Server
application = create_app(
os.getenv('DM_SUPPLIER_FRONTEND_ENVIRONMENT') or 'default'
)
manager = Manager(application)
manager.add_command("runserver", Server(port=5003))
if __name__ == '__main__':
manager.run()
|
4ab6009a01f71abaff72db7311f3a74d88ec524c | examples/pax_mininet_node.py | examples/pax_mininet_node.py | # coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
| # coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
| Disable ip_forward in the Pax Mininet node class | Disable ip_forward in the Pax Mininet node class
| Python | apache-2.0 | niksu/pax,TMVector/pax,niksu/pax,TMVector/pax,niksu/pax | # coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
Disable ip_forward in the Pax Mininet node class | # coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
| <commit_before># coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
<commit_msg>Disable ip_forward in the Pax Mininet node class<commit_after> | # coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
| # coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
Disable ip_forward in the Pax Mininet node class# coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
| <commit_before># coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
<commit_msg>Disable ip_forward in the Pax Mininet node class<commit_after># coding: latin-1
"""
pax_mininet_node.py: Defines PaxNode which allows Pax to behave as the sole packet hander on a node.
"""
from mininet.node import Node
from mininet.log import info, warn
class PaxNode( Node ):
"PaxNode: A node which allows Pax to behave as the sole packet hander on that node."
def __init__(self, name, **params):
super(PaxNode, self).__init__(name, **params)
def config(self, **params):
super(PaxNode, self).config(**params)
# Setup iptable rules to drop incoming packets on each interface:
# Because Pax only sniffs packets (it doesn't steal them), we need to drop the packets
# to prevent the OS from handling them and responding.
print "Drop all incoming TCP traffic on nat0 so that Pax is effectively the middle-man"
for intf in self.intfList():
self.cmd("iptables -A INPUT -p tcp -i %s -j DROP" % intf.name)
# Disable ip_forward because otherwise this still happens, even with the above iptables rules
self.cmd("sysctl -w net.ipv4.ip_forward=0")
def terminate(self):
# Remove iptables rules
for intf in self.intfList():
self.cmd("iptables -D INPUT -p tcp -i %s -j DROP" % intf.name)
super(PaxNode, self).terminate()
|
e042101c8f4c5ef06e590a47114778a0cf06d4f0 | oh_bot/lookup_discussion_topics.py | oh_bot/lookup_discussion_topics.py | import logging
import time
import os
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
def build_response(message):
topics = ""
for i, j in enumerate(message['Items']):
topics += "{0}: {1}\n".format(i + 1, j['topic'])
return {
"dialogAction": {
"type": "Close",
"fulfillmentState": "Fulfilled",
"message": {
"contentType": "PlainText",
"content": topics
}
}
}
def lookup_discussion_topics(event, context):
#debug print for lex event data
print(event)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_TOPICS'])
team = event['currentIntent']['slots']['GetTeam']
date = event['currentIntent']['slots']['GetQueryDay']
filter_expression = Key('date').eq(date) & Key('team').eq(team);
response = table.scan(
FilterExpression=filter_expression,
)
print("Query succeeded:")
return build_response(response)
| import logging
import time
import os
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
def build_response(message):
topics = ""
for i, j in enumerate(message['Items']):
topics += "{0}: {1}\n".format(i + 1, j['topic'])
if not topics:
topics = "No topics for this date have been set"
return {
"dialogAction": {
"type": "Close",
"fulfillmentState": "Fulfilled",
"message": {
"contentType": "PlainText",
"content": topics
}
}
}
def lookup_discussion_topics(event, context):
#debug print for lex event data
print(event)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_TOPICS'])
team = event['currentIntent']['slots']['GetTeam']
date = event['currentIntent']['slots']['GetQueryDay']
filter_expression = Key('date').eq(date) & Key('team').eq(team);
response = table.scan(
FilterExpression=filter_expression,
)
print("Query succeeded:")
return build_response(response)
| Add response if no topics are set | Add response if no topics are set
| Python | mit | silvermullet/oh-bot,silvermullet/oh-bot,silvermullet/oh-bot | import logging
import time
import os
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
def build_response(message):
topics = ""
for i, j in enumerate(message['Items']):
topics += "{0}: {1}\n".format(i + 1, j['topic'])
return {
"dialogAction": {
"type": "Close",
"fulfillmentState": "Fulfilled",
"message": {
"contentType": "PlainText",
"content": topics
}
}
}
def lookup_discussion_topics(event, context):
#debug print for lex event data
print(event)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_TOPICS'])
team = event['currentIntent']['slots']['GetTeam']
date = event['currentIntent']['slots']['GetQueryDay']
filter_expression = Key('date').eq(date) & Key('team').eq(team);
response = table.scan(
FilterExpression=filter_expression,
)
print("Query succeeded:")
return build_response(response)
Add response if no topics are set | import logging
import time
import os
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
def build_response(message):
topics = ""
for i, j in enumerate(message['Items']):
topics += "{0}: {1}\n".format(i + 1, j['topic'])
if not topics:
topics = "No topics for this date have been set"
return {
"dialogAction": {
"type": "Close",
"fulfillmentState": "Fulfilled",
"message": {
"contentType": "PlainText",
"content": topics
}
}
}
def lookup_discussion_topics(event, context):
#debug print for lex event data
print(event)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_TOPICS'])
team = event['currentIntent']['slots']['GetTeam']
date = event['currentIntent']['slots']['GetQueryDay']
filter_expression = Key('date').eq(date) & Key('team').eq(team);
response = table.scan(
FilterExpression=filter_expression,
)
print("Query succeeded:")
return build_response(response)
| <commit_before>import logging
import time
import os
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
def build_response(message):
topics = ""
for i, j in enumerate(message['Items']):
topics += "{0}: {1}\n".format(i + 1, j['topic'])
return {
"dialogAction": {
"type": "Close",
"fulfillmentState": "Fulfilled",
"message": {
"contentType": "PlainText",
"content": topics
}
}
}
def lookup_discussion_topics(event, context):
#debug print for lex event data
print(event)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_TOPICS'])
team = event['currentIntent']['slots']['GetTeam']
date = event['currentIntent']['slots']['GetQueryDay']
filter_expression = Key('date').eq(date) & Key('team').eq(team);
response = table.scan(
FilterExpression=filter_expression,
)
print("Query succeeded:")
return build_response(response)
<commit_msg>Add response if no topics are set<commit_after> | import logging
import time
import os
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
def build_response(message):
topics = ""
for i, j in enumerate(message['Items']):
topics += "{0}: {1}\n".format(i + 1, j['topic'])
if not topics:
topics = "No topics for this date have been set"
return {
"dialogAction": {
"type": "Close",
"fulfillmentState": "Fulfilled",
"message": {
"contentType": "PlainText",
"content": topics
}
}
}
def lookup_discussion_topics(event, context):
#debug print for lex event data
print(event)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_TOPICS'])
team = event['currentIntent']['slots']['GetTeam']
date = event['currentIntent']['slots']['GetQueryDay']
filter_expression = Key('date').eq(date) & Key('team').eq(team);
response = table.scan(
FilterExpression=filter_expression,
)
print("Query succeeded:")
return build_response(response)
| import logging
import time
import os
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
def build_response(message):
topics = ""
for i, j in enumerate(message['Items']):
topics += "{0}: {1}\n".format(i + 1, j['topic'])
return {
"dialogAction": {
"type": "Close",
"fulfillmentState": "Fulfilled",
"message": {
"contentType": "PlainText",
"content": topics
}
}
}
def lookup_discussion_topics(event, context):
#debug print for lex event data
print(event)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_TOPICS'])
team = event['currentIntent']['slots']['GetTeam']
date = event['currentIntent']['slots']['GetQueryDay']
filter_expression = Key('date').eq(date) & Key('team').eq(team);
response = table.scan(
FilterExpression=filter_expression,
)
print("Query succeeded:")
return build_response(response)
Add response if no topics are setimport logging
import time
import os
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
def build_response(message):
topics = ""
for i, j in enumerate(message['Items']):
topics += "{0}: {1}\n".format(i + 1, j['topic'])
if not topics:
topics = "No topics for this date have been set"
return {
"dialogAction": {
"type": "Close",
"fulfillmentState": "Fulfilled",
"message": {
"contentType": "PlainText",
"content": topics
}
}
}
def lookup_discussion_topics(event, context):
#debug print for lex event data
print(event)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_TOPICS'])
team = event['currentIntent']['slots']['GetTeam']
date = event['currentIntent']['slots']['GetQueryDay']
filter_expression = Key('date').eq(date) & Key('team').eq(team);
response = table.scan(
FilterExpression=filter_expression,
)
print("Query succeeded:")
return build_response(response)
| <commit_before>import logging
import time
import os
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
def build_response(message):
topics = ""
for i, j in enumerate(message['Items']):
topics += "{0}: {1}\n".format(i + 1, j['topic'])
return {
"dialogAction": {
"type": "Close",
"fulfillmentState": "Fulfilled",
"message": {
"contentType": "PlainText",
"content": topics
}
}
}
def lookup_discussion_topics(event, context):
#debug print for lex event data
print(event)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_TOPICS'])
team = event['currentIntent']['slots']['GetTeam']
date = event['currentIntent']['slots']['GetQueryDay']
filter_expression = Key('date').eq(date) & Key('team').eq(team);
response = table.scan(
FilterExpression=filter_expression,
)
print("Query succeeded:")
return build_response(response)
<commit_msg>Add response if no topics are set<commit_after>import logging
import time
import os
import boto3
from boto3.dynamodb.conditions import Key, Attr
dynamodb = boto3.resource('dynamodb')
def build_response(message):
topics = ""
for i, j in enumerate(message['Items']):
topics += "{0}: {1}\n".format(i + 1, j['topic'])
if not topics:
topics = "No topics for this date have been set"
return {
"dialogAction": {
"type": "Close",
"fulfillmentState": "Fulfilled",
"message": {
"contentType": "PlainText",
"content": topics
}
}
}
def lookup_discussion_topics(event, context):
#debug print for lex event data
print(event)
table = dynamodb.Table(os.environ['DYNAMODB_TABLE_TOPICS'])
team = event['currentIntent']['slots']['GetTeam']
date = event['currentIntent']['slots']['GetQueryDay']
filter_expression = Key('date').eq(date) & Key('team').eq(team);
response = table.scan(
FilterExpression=filter_expression,
)
print("Query succeeded:")
return build_response(response)
|
5d634511af87150cf1e1b57c52b2bb7136890eb4 | twilix/cmd.py | twilix/cmd.py | import os
import subprocess
import errno
def cmd_pwd(*args):
return subprocess.check_output(['pwd'])
def cmd_ls(*args):
return subprocess.check_output(*args)
def cmd_cd(*args):
if path[0] == '~':
path[0] = os.path.expanduser(path[0])
os.chdir(path[0])
return run_pwd()
def cmd_mkdir(*args):
try:
if path[0][0] == '~':
path[0] = os.path.expanduser(path[0])
os.makedirs(path[0])
return "Director {0} created".format(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
cmds = {
'pwd' : cmd_pwd,
'ls' : cmd_ls,
'cd' : cmd_cd,
'mkdir': cmd_mkdir
}
if __name__ == '__main__':
a = cmd_mkdir("~/Test/ing")
print a
| import os
import subprocess
import errno
def cmd_pwd(*args):
return subprocess.check_output(['pwd'])
def cmd_ls(*args):
return subprocess.check_output(*args)
def cmd_cd(*args):
if args[0][1] == '~':
args[0][1] = os.path.expanduser(args[0][1])
os.chdir(args[0][1])
return cmd_pwd()
def cmd_mkdir(*args):
try:
if args[0][1][0] == '~':
args[0][1] = os.path.expanduser(args[0][1])
os.makedirs(args[0][1])
return "Director {0} created".format(args[0][1])
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def cmd_pipe(*args):
p1 = subprocess.Popen(args[0][0], stdout=subprocess.PIPE)
p2 = subprocess.Popen(args[0][1], stdin=p1.stdout, stdout=subprocess.PIPE)
output = p2.communicate()[0]
return output
cmds = {
'pwd' : cmd_pwd,
'ls' : cmd_ls,
'cd' : cmd_cd,
'mkdir': cmd_mkdir,
'pipe': cmd_pipe
}
if __name__ == '__main__':
a = cmd_mkdir("~/ue/mhacks")
print a
| Add option tu run piped commands | Add option tu run piped commands
| Python | mit | ueg1990/twilix,ueg1990/twilix | import os
import subprocess
import errno
def cmd_pwd(*args):
return subprocess.check_output(['pwd'])
def cmd_ls(*args):
return subprocess.check_output(*args)
def cmd_cd(*args):
if path[0] == '~':
path[0] = os.path.expanduser(path[0])
os.chdir(path[0])
return run_pwd()
def cmd_mkdir(*args):
try:
if path[0][0] == '~':
path[0] = os.path.expanduser(path[0])
os.makedirs(path[0])
return "Director {0} created".format(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
cmds = {
'pwd' : cmd_pwd,
'ls' : cmd_ls,
'cd' : cmd_cd,
'mkdir': cmd_mkdir
}
if __name__ == '__main__':
a = cmd_mkdir("~/Test/ing")
print a
Add option tu run piped commands | import os
import subprocess
import errno
def cmd_pwd(*args):
return subprocess.check_output(['pwd'])
def cmd_ls(*args):
return subprocess.check_output(*args)
def cmd_cd(*args):
if args[0][1] == '~':
args[0][1] = os.path.expanduser(args[0][1])
os.chdir(args[0][1])
return cmd_pwd()
def cmd_mkdir(*args):
try:
if args[0][1][0] == '~':
args[0][1] = os.path.expanduser(args[0][1])
os.makedirs(args[0][1])
return "Director {0} created".format(args[0][1])
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def cmd_pipe(*args):
p1 = subprocess.Popen(args[0][0], stdout=subprocess.PIPE)
p2 = subprocess.Popen(args[0][1], stdin=p1.stdout, stdout=subprocess.PIPE)
output = p2.communicate()[0]
return output
cmds = {
'pwd' : cmd_pwd,
'ls' : cmd_ls,
'cd' : cmd_cd,
'mkdir': cmd_mkdir,
'pipe': cmd_pipe
}
if __name__ == '__main__':
a = cmd_mkdir("~/ue/mhacks")
print a
| <commit_before>import os
import subprocess
import errno
def cmd_pwd(*args):
return subprocess.check_output(['pwd'])
def cmd_ls(*args):
return subprocess.check_output(*args)
def cmd_cd(*args):
if path[0] == '~':
path[0] = os.path.expanduser(path[0])
os.chdir(path[0])
return run_pwd()
def cmd_mkdir(*args):
try:
if path[0][0] == '~':
path[0] = os.path.expanduser(path[0])
os.makedirs(path[0])
return "Director {0} created".format(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
cmds = {
'pwd' : cmd_pwd,
'ls' : cmd_ls,
'cd' : cmd_cd,
'mkdir': cmd_mkdir
}
if __name__ == '__main__':
a = cmd_mkdir("~/Test/ing")
print a
<commit_msg>Add option tu run piped commands<commit_after> | import os
import subprocess
import errno
def cmd_pwd(*args):
return subprocess.check_output(['pwd'])
def cmd_ls(*args):
return subprocess.check_output(*args)
def cmd_cd(*args):
if args[0][1] == '~':
args[0][1] = os.path.expanduser(args[0][1])
os.chdir(args[0][1])
return cmd_pwd()
def cmd_mkdir(*args):
try:
if args[0][1][0] == '~':
args[0][1] = os.path.expanduser(args[0][1])
os.makedirs(args[0][1])
return "Director {0} created".format(args[0][1])
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def cmd_pipe(*args):
p1 = subprocess.Popen(args[0][0], stdout=subprocess.PIPE)
p2 = subprocess.Popen(args[0][1], stdin=p1.stdout, stdout=subprocess.PIPE)
output = p2.communicate()[0]
return output
cmds = {
'pwd' : cmd_pwd,
'ls' : cmd_ls,
'cd' : cmd_cd,
'mkdir': cmd_mkdir,
'pipe': cmd_pipe
}
if __name__ == '__main__':
a = cmd_mkdir("~/ue/mhacks")
print a
| import os
import subprocess
import errno
def cmd_pwd(*args):
return subprocess.check_output(['pwd'])
def cmd_ls(*args):
return subprocess.check_output(*args)
def cmd_cd(*args):
if path[0] == '~':
path[0] = os.path.expanduser(path[0])
os.chdir(path[0])
return run_pwd()
def cmd_mkdir(*args):
try:
if path[0][0] == '~':
path[0] = os.path.expanduser(path[0])
os.makedirs(path[0])
return "Director {0} created".format(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
cmds = {
'pwd' : cmd_pwd,
'ls' : cmd_ls,
'cd' : cmd_cd,
'mkdir': cmd_mkdir
}
if __name__ == '__main__':
a = cmd_mkdir("~/Test/ing")
print a
Add option tu run piped commandsimport os
import subprocess
import errno
def cmd_pwd(*args):
return subprocess.check_output(['pwd'])
def cmd_ls(*args):
return subprocess.check_output(*args)
def cmd_cd(*args):
if args[0][1] == '~':
args[0][1] = os.path.expanduser(args[0][1])
os.chdir(args[0][1])
return cmd_pwd()
def cmd_mkdir(*args):
try:
if args[0][1][0] == '~':
args[0][1] = os.path.expanduser(args[0][1])
os.makedirs(args[0][1])
return "Director {0} created".format(args[0][1])
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def cmd_pipe(*args):
p1 = subprocess.Popen(args[0][0], stdout=subprocess.PIPE)
p2 = subprocess.Popen(args[0][1], stdin=p1.stdout, stdout=subprocess.PIPE)
output = p2.communicate()[0]
return output
cmds = {
'pwd' : cmd_pwd,
'ls' : cmd_ls,
'cd' : cmd_cd,
'mkdir': cmd_mkdir,
'pipe': cmd_pipe
}
if __name__ == '__main__':
a = cmd_mkdir("~/ue/mhacks")
print a
| <commit_before>import os
import subprocess
import errno
def cmd_pwd(*args):
return subprocess.check_output(['pwd'])
def cmd_ls(*args):
return subprocess.check_output(*args)
def cmd_cd(*args):
if path[0] == '~':
path[0] = os.path.expanduser(path[0])
os.chdir(path[0])
return run_pwd()
def cmd_mkdir(*args):
try:
if path[0][0] == '~':
path[0] = os.path.expanduser(path[0])
os.makedirs(path[0])
return "Director {0} created".format(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
cmds = {
'pwd' : cmd_pwd,
'ls' : cmd_ls,
'cd' : cmd_cd,
'mkdir': cmd_mkdir
}
if __name__ == '__main__':
a = cmd_mkdir("~/Test/ing")
print a
<commit_msg>Add option tu run piped commands<commit_after>import os
import subprocess
import errno
def cmd_pwd(*args):
return subprocess.check_output(['pwd'])
def cmd_ls(*args):
return subprocess.check_output(*args)
def cmd_cd(*args):
if args[0][1] == '~':
args[0][1] = os.path.expanduser(args[0][1])
os.chdir(args[0][1])
return cmd_pwd()
def cmd_mkdir(*args):
try:
if args[0][1][0] == '~':
args[0][1] = os.path.expanduser(args[0][1])
os.makedirs(args[0][1])
return "Director {0} created".format(args[0][1])
except OSError as exception:
if exception.errno != errno.EEXIST:
raise
def cmd_pipe(*args):
p1 = subprocess.Popen(args[0][0], stdout=subprocess.PIPE)
p2 = subprocess.Popen(args[0][1], stdin=p1.stdout, stdout=subprocess.PIPE)
output = p2.communicate()[0]
return output
cmds = {
'pwd' : cmd_pwd,
'ls' : cmd_ls,
'cd' : cmd_cd,
'mkdir': cmd_mkdir,
'pipe': cmd_pipe
}
if __name__ == '__main__':
a = cmd_mkdir("~/ue/mhacks")
print a
|
c5467b2ad4fbb0dbc37809df077e5c69915489c9 | go_cli/send.py | go_cli/send.py | """ Send messages via an HTTP API (nostream) conversation. """
import click
from go_http.send import HttpApiSender
@click.option(
'--conversation', '-c',
help='HTTP API conversation key')
@click.option(
'--token', '-t',
help='HTTP API conversation token')
@click.option(
'--csv', type=click.File('rb'),
help=('CSV file with columns to_addr, content and, optionally,'
'session_event.'))
@click.option(
'--json', type=click.File('rb'),
help=('JSON objects, one per line with fields to_addr, content and,'
' optionally, session_event'))
@click.pass_context
def send(ctx, conversation, token, csv, json):
""" Send messages via an HTTP API (nostream) conversation.
"""
http_api = HttpApiSender(ctx.obj.account_key, conversation, token)
messages = [] # TODO: parse csv or json
for msg in messages:
http_api.send_text(**msg)
| """ Send messages via an HTTP API (nostream) conversation. """
import csv
import json
import click
from go_http.send import HttpApiSender
@click.option(
'--conversation', '-c',
help='HTTP API conversation key')
@click.option(
'--token', '-t',
help='HTTP API conversation token')
@click.option(
'--csv', type=click.File('rb'),
help=('CSV file with columns to_addr, content and, optionally,'
'session_event.'))
@click.option(
'--json', type=click.File('rb'),
help=('JSON objects, one per line with fields to_addr, content and,'
' optionally, session_event'))
@click.pass_context
def send(ctx, conversation, token, csv, json):
""" Send messages via an HTTP API (nostream) conversation.
"""
if not any((csv, json)):
click.echo("Please specify either --csv or --json.")
ctx.abort()
http_api = HttpApiSender(ctx.obj.account_key, conversation, token)
if csv:
for msg in messages_from_csv(csv):
http_api.send_text(**msg)
if json:
for msg in messages_from_json(json):
http_api.send_text(**msg)
def messages_from_csv(csv_file):
reader = csv.DictReader(csv_file)
for data in reader:
yield {
"to_addr": data["to_addr"],
"content": data["content"],
"session_event": data.get("session_event")
}
def messages_from_json(json_file):
for line in json_file:
data = json.loads(line.rstrip("\n"))
yield {
"to_addr": data["to_addr"],
"content": data["content"],
"session_event": data.get("session_event")
}
| Add CSV and JSON parsing. | Add CSV and JSON parsing.
| Python | bsd-3-clause | praekelt/go-cli,praekelt/go-cli | """ Send messages via an HTTP API (nostream) conversation. """
import click
from go_http.send import HttpApiSender
@click.option(
'--conversation', '-c',
help='HTTP API conversation key')
@click.option(
'--token', '-t',
help='HTTP API conversation token')
@click.option(
'--csv', type=click.File('rb'),
help=('CSV file with columns to_addr, content and, optionally,'
'session_event.'))
@click.option(
'--json', type=click.File('rb'),
help=('JSON objects, one per line with fields to_addr, content and,'
' optionally, session_event'))
@click.pass_context
def send(ctx, conversation, token, csv, json):
""" Send messages via an HTTP API (nostream) conversation.
"""
http_api = HttpApiSender(ctx.obj.account_key, conversation, token)
messages = [] # TODO: parse csv or json
for msg in messages:
http_api.send_text(**msg)
Add CSV and JSON parsing. | """ Send messages via an HTTP API (nostream) conversation. """
import csv
import json
import click
from go_http.send import HttpApiSender
@click.option(
'--conversation', '-c',
help='HTTP API conversation key')
@click.option(
'--token', '-t',
help='HTTP API conversation token')
@click.option(
'--csv', type=click.File('rb'),
help=('CSV file with columns to_addr, content and, optionally,'
'session_event.'))
@click.option(
'--json', type=click.File('rb'),
help=('JSON objects, one per line with fields to_addr, content and,'
' optionally, session_event'))
@click.pass_context
def send(ctx, conversation, token, csv, json):
""" Send messages via an HTTP API (nostream) conversation.
"""
if not any((csv, json)):
click.echo("Please specify either --csv or --json.")
ctx.abort()
http_api = HttpApiSender(ctx.obj.account_key, conversation, token)
if csv:
for msg in messages_from_csv(csv):
http_api.send_text(**msg)
if json:
for msg in messages_from_json(json):
http_api.send_text(**msg)
def messages_from_csv(csv_file):
reader = csv.DictReader(csv_file)
for data in reader:
yield {
"to_addr": data["to_addr"],
"content": data["content"],
"session_event": data.get("session_event")
}
def messages_from_json(json_file):
for line in json_file:
data = json.loads(line.rstrip("\n"))
yield {
"to_addr": data["to_addr"],
"content": data["content"],
"session_event": data.get("session_event")
}
| <commit_before>""" Send messages via an HTTP API (nostream) conversation. """
import click
from go_http.send import HttpApiSender
@click.option(
'--conversation', '-c',
help='HTTP API conversation key')
@click.option(
'--token', '-t',
help='HTTP API conversation token')
@click.option(
'--csv', type=click.File('rb'),
help=('CSV file with columns to_addr, content and, optionally,'
'session_event.'))
@click.option(
'--json', type=click.File('rb'),
help=('JSON objects, one per line with fields to_addr, content and,'
' optionally, session_event'))
@click.pass_context
def send(ctx, conversation, token, csv, json):
""" Send messages via an HTTP API (nostream) conversation.
"""
http_api = HttpApiSender(ctx.obj.account_key, conversation, token)
messages = [] # TODO: parse csv or json
for msg in messages:
http_api.send_text(**msg)
<commit_msg>Add CSV and JSON parsing.<commit_after> | """ Send messages via an HTTP API (nostream) conversation. """
import csv
import json
import click
from go_http.send import HttpApiSender
@click.option(
'--conversation', '-c',
help='HTTP API conversation key')
@click.option(
'--token', '-t',
help='HTTP API conversation token')
@click.option(
'--csv', type=click.File('rb'),
help=('CSV file with columns to_addr, content and, optionally,'
'session_event.'))
@click.option(
'--json', type=click.File('rb'),
help=('JSON objects, one per line with fields to_addr, content and,'
' optionally, session_event'))
@click.pass_context
def send(ctx, conversation, token, csv, json):
""" Send messages via an HTTP API (nostream) conversation.
"""
if not any((csv, json)):
click.echo("Please specify either --csv or --json.")
ctx.abort()
http_api = HttpApiSender(ctx.obj.account_key, conversation, token)
if csv:
for msg in messages_from_csv(csv):
http_api.send_text(**msg)
if json:
for msg in messages_from_json(json):
http_api.send_text(**msg)
def messages_from_csv(csv_file):
reader = csv.DictReader(csv_file)
for data in reader:
yield {
"to_addr": data["to_addr"],
"content": data["content"],
"session_event": data.get("session_event")
}
def messages_from_json(json_file):
for line in json_file:
data = json.loads(line.rstrip("\n"))
yield {
"to_addr": data["to_addr"],
"content": data["content"],
"session_event": data.get("session_event")
}
| """ Send messages via an HTTP API (nostream) conversation. """
import click
from go_http.send import HttpApiSender
@click.option(
'--conversation', '-c',
help='HTTP API conversation key')
@click.option(
'--token', '-t',
help='HTTP API conversation token')
@click.option(
'--csv', type=click.File('rb'),
help=('CSV file with columns to_addr, content and, optionally,'
'session_event.'))
@click.option(
'--json', type=click.File('rb'),
help=('JSON objects, one per line with fields to_addr, content and,'
' optionally, session_event'))
@click.pass_context
def send(ctx, conversation, token, csv, json):
""" Send messages via an HTTP API (nostream) conversation.
"""
http_api = HttpApiSender(ctx.obj.account_key, conversation, token)
messages = [] # TODO: parse csv or json
for msg in messages:
http_api.send_text(**msg)
Add CSV and JSON parsing.""" Send messages via an HTTP API (nostream) conversation. """
import csv
import json
import click
from go_http.send import HttpApiSender
@click.option(
'--conversation', '-c',
help='HTTP API conversation key')
@click.option(
'--token', '-t',
help='HTTP API conversation token')
@click.option(
'--csv', type=click.File('rb'),
help=('CSV file with columns to_addr, content and, optionally,'
'session_event.'))
@click.option(
'--json', type=click.File('rb'),
help=('JSON objects, one per line with fields to_addr, content and,'
' optionally, session_event'))
@click.pass_context
def send(ctx, conversation, token, csv, json):
""" Send messages via an HTTP API (nostream) conversation.
"""
if not any((csv, json)):
click.echo("Please specify either --csv or --json.")
ctx.abort()
http_api = HttpApiSender(ctx.obj.account_key, conversation, token)
if csv:
for msg in messages_from_csv(csv):
http_api.send_text(**msg)
if json:
for msg in messages_from_json(json):
http_api.send_text(**msg)
def messages_from_csv(csv_file):
reader = csv.DictReader(csv_file)
for data in reader:
yield {
"to_addr": data["to_addr"],
"content": data["content"],
"session_event": data.get("session_event")
}
def messages_from_json(json_file):
for line in json_file:
data = json.loads(line.rstrip("\n"))
yield {
"to_addr": data["to_addr"],
"content": data["content"],
"session_event": data.get("session_event")
}
| <commit_before>""" Send messages via an HTTP API (nostream) conversation. """
import click
from go_http.send import HttpApiSender
@click.option(
'--conversation', '-c',
help='HTTP API conversation key')
@click.option(
'--token', '-t',
help='HTTP API conversation token')
@click.option(
'--csv', type=click.File('rb'),
help=('CSV file with columns to_addr, content and, optionally,'
'session_event.'))
@click.option(
'--json', type=click.File('rb'),
help=('JSON objects, one per line with fields to_addr, content and,'
' optionally, session_event'))
@click.pass_context
def send(ctx, conversation, token, csv, json):
""" Send messages via an HTTP API (nostream) conversation.
"""
http_api = HttpApiSender(ctx.obj.account_key, conversation, token)
messages = [] # TODO: parse csv or json
for msg in messages:
http_api.send_text(**msg)
<commit_msg>Add CSV and JSON parsing.<commit_after>""" Send messages via an HTTP API (nostream) conversation. """
import csv
import json
import click
from go_http.send import HttpApiSender
@click.option(
'--conversation', '-c',
help='HTTP API conversation key')
@click.option(
'--token', '-t',
help='HTTP API conversation token')
@click.option(
'--csv', type=click.File('rb'),
help=('CSV file with columns to_addr, content and, optionally,'
'session_event.'))
@click.option(
'--json', type=click.File('rb'),
help=('JSON objects, one per line with fields to_addr, content and,'
' optionally, session_event'))
@click.pass_context
def send(ctx, conversation, token, csv, json):
""" Send messages via an HTTP API (nostream) conversation.
"""
if not any((csv, json)):
click.echo("Please specify either --csv or --json.")
ctx.abort()
http_api = HttpApiSender(ctx.obj.account_key, conversation, token)
if csv:
for msg in messages_from_csv(csv):
http_api.send_text(**msg)
if json:
for msg in messages_from_json(json):
http_api.send_text(**msg)
def messages_from_csv(csv_file):
reader = csv.DictReader(csv_file)
for data in reader:
yield {
"to_addr": data["to_addr"],
"content": data["content"],
"session_event": data.get("session_event")
}
def messages_from_json(json_file):
for line in json_file:
data = json.loads(line.rstrip("\n"))
yield {
"to_addr": data["to_addr"],
"content": data["content"],
"session_event": data.get("session_event")
}
|
a154db2de427a31746c51c39077c6020f70478b6 | export.py | export.py | #! /usr/bin/env python3
import sqlite3
import bcrypt
import hashlib
from Crypto.Cipher import AES
import codecs
import json
from password import encrypt, decrypt, toHex, fromHex
pwdatabase = 'passwords.db'
jsonfile = open('passwords.json', mode='w')
password = input('Enter password: ')
conn = sqlite3.connect(pwdatabase)
pwHash, salt = conn.execute('select * from master_pass').fetchone()
if bcrypt.checkpw(password, pwHash):
print('Password is correct.')
aes_key = toHex(bcrypt.kdf(password, salt, 16, 32))
records = [list(i) for i in conn.execute('select * from passwords')]
for i in range(len(records)):
records[i][3] = decrypt(aes_key, records[i][3]).decode()
records[i][4] = decrypt(aes_key, records[i][4]).decode()
json.dump(records,jsonfile)
else:
print('Incorrect password.')
jsonfile.close()
conn.close()
| #! /usr/bin/env python3
import sqlite3
import bcrypt
import hashlib
from Crypto.Cipher import AES
import codecs
import json
from password import encrypt, decrypt, toHex, fromHex
pwdatabase = 'passwords.db'
jsonfile = open('passwords.json', mode='w')
password = input('Enter password: ')
conn = sqlite3.connect(pwdatabase)
pwHash, salt = conn.execute('select * from master_pass').fetchone()
if bcrypt.checkpw(password, pwHash):
print('Password is correct.')
aes_key = toHex(bcrypt.kdf(password, salt, 16, 32))
records = [list(i) for i in conn.execute('select * from passwords')]
print(records)
for i in range(len(records)):
records[i][3] = decrypt(aes_key, records[i][3]).decode()
records[i][4] = decrypt(aes_key, records[i][4]).decode()
json.dump(records,jsonfile)
else:
print('Incorrect password.')
jsonfile.close()
conn.close()
| Print encrypted records for debugging. | Print encrypted records for debugging.
| Python | mit | tortxof/cherry-password,tortxof/cherry-password,tortxof/cherry-password | #! /usr/bin/env python3
import sqlite3
import bcrypt
import hashlib
from Crypto.Cipher import AES
import codecs
import json
from password import encrypt, decrypt, toHex, fromHex
pwdatabase = 'passwords.db'
jsonfile = open('passwords.json', mode='w')
password = input('Enter password: ')
conn = sqlite3.connect(pwdatabase)
pwHash, salt = conn.execute('select * from master_pass').fetchone()
if bcrypt.checkpw(password, pwHash):
print('Password is correct.')
aes_key = toHex(bcrypt.kdf(password, salt, 16, 32))
records = [list(i) for i in conn.execute('select * from passwords')]
for i in range(len(records)):
records[i][3] = decrypt(aes_key, records[i][3]).decode()
records[i][4] = decrypt(aes_key, records[i][4]).decode()
json.dump(records,jsonfile)
else:
print('Incorrect password.')
jsonfile.close()
conn.close()
Print encrypted records for debugging. | #! /usr/bin/env python3
import sqlite3
import bcrypt
import hashlib
from Crypto.Cipher import AES
import codecs
import json
from password import encrypt, decrypt, toHex, fromHex
pwdatabase = 'passwords.db'
jsonfile = open('passwords.json', mode='w')
password = input('Enter password: ')
conn = sqlite3.connect(pwdatabase)
pwHash, salt = conn.execute('select * from master_pass').fetchone()
if bcrypt.checkpw(password, pwHash):
print('Password is correct.')
aes_key = toHex(bcrypt.kdf(password, salt, 16, 32))
records = [list(i) for i in conn.execute('select * from passwords')]
print(records)
for i in range(len(records)):
records[i][3] = decrypt(aes_key, records[i][3]).decode()
records[i][4] = decrypt(aes_key, records[i][4]).decode()
json.dump(records,jsonfile)
else:
print('Incorrect password.')
jsonfile.close()
conn.close()
| <commit_before>#! /usr/bin/env python3
import sqlite3
import bcrypt
import hashlib
from Crypto.Cipher import AES
import codecs
import json
from password import encrypt, decrypt, toHex, fromHex
pwdatabase = 'passwords.db'
jsonfile = open('passwords.json', mode='w')
password = input('Enter password: ')
conn = sqlite3.connect(pwdatabase)
pwHash, salt = conn.execute('select * from master_pass').fetchone()
if bcrypt.checkpw(password, pwHash):
print('Password is correct.')
aes_key = toHex(bcrypt.kdf(password, salt, 16, 32))
records = [list(i) for i in conn.execute('select * from passwords')]
for i in range(len(records)):
records[i][3] = decrypt(aes_key, records[i][3]).decode()
records[i][4] = decrypt(aes_key, records[i][4]).decode()
json.dump(records,jsonfile)
else:
print('Incorrect password.')
jsonfile.close()
conn.close()
<commit_msg>Print encrypted records for debugging.<commit_after> | #! /usr/bin/env python3
import sqlite3
import bcrypt
import hashlib
from Crypto.Cipher import AES
import codecs
import json
from password import encrypt, decrypt, toHex, fromHex
pwdatabase = 'passwords.db'
jsonfile = open('passwords.json', mode='w')
password = input('Enter password: ')
conn = sqlite3.connect(pwdatabase)
pwHash, salt = conn.execute('select * from master_pass').fetchone()
if bcrypt.checkpw(password, pwHash):
print('Password is correct.')
aes_key = toHex(bcrypt.kdf(password, salt, 16, 32))
records = [list(i) for i in conn.execute('select * from passwords')]
print(records)
for i in range(len(records)):
records[i][3] = decrypt(aes_key, records[i][3]).decode()
records[i][4] = decrypt(aes_key, records[i][4]).decode()
json.dump(records,jsonfile)
else:
print('Incorrect password.')
jsonfile.close()
conn.close()
| #! /usr/bin/env python3
import sqlite3
import bcrypt
import hashlib
from Crypto.Cipher import AES
import codecs
import json
from password import encrypt, decrypt, toHex, fromHex
pwdatabase = 'passwords.db'
jsonfile = open('passwords.json', mode='w')
password = input('Enter password: ')
conn = sqlite3.connect(pwdatabase)
pwHash, salt = conn.execute('select * from master_pass').fetchone()
if bcrypt.checkpw(password, pwHash):
print('Password is correct.')
aes_key = toHex(bcrypt.kdf(password, salt, 16, 32))
records = [list(i) for i in conn.execute('select * from passwords')]
for i in range(len(records)):
records[i][3] = decrypt(aes_key, records[i][3]).decode()
records[i][4] = decrypt(aes_key, records[i][4]).decode()
json.dump(records,jsonfile)
else:
print('Incorrect password.')
jsonfile.close()
conn.close()
Print encrypted records for debugging.#! /usr/bin/env python3
import sqlite3
import bcrypt
import hashlib
from Crypto.Cipher import AES
import codecs
import json
from password import encrypt, decrypt, toHex, fromHex
pwdatabase = 'passwords.db'
jsonfile = open('passwords.json', mode='w')
password = input('Enter password: ')
conn = sqlite3.connect(pwdatabase)
pwHash, salt = conn.execute('select * from master_pass').fetchone()
if bcrypt.checkpw(password, pwHash):
print('Password is correct.')
aes_key = toHex(bcrypt.kdf(password, salt, 16, 32))
records = [list(i) for i in conn.execute('select * from passwords')]
print(records)
for i in range(len(records)):
records[i][3] = decrypt(aes_key, records[i][3]).decode()
records[i][4] = decrypt(aes_key, records[i][4]).decode()
json.dump(records,jsonfile)
else:
print('Incorrect password.')
jsonfile.close()
conn.close()
| <commit_before>#! /usr/bin/env python3
import sqlite3
import bcrypt
import hashlib
from Crypto.Cipher import AES
import codecs
import json
from password import encrypt, decrypt, toHex, fromHex
pwdatabase = 'passwords.db'
jsonfile = open('passwords.json', mode='w')
password = input('Enter password: ')
conn = sqlite3.connect(pwdatabase)
pwHash, salt = conn.execute('select * from master_pass').fetchone()
if bcrypt.checkpw(password, pwHash):
print('Password is correct.')
aes_key = toHex(bcrypt.kdf(password, salt, 16, 32))
records = [list(i) for i in conn.execute('select * from passwords')]
for i in range(len(records)):
records[i][3] = decrypt(aes_key, records[i][3]).decode()
records[i][4] = decrypt(aes_key, records[i][4]).decode()
json.dump(records,jsonfile)
else:
print('Incorrect password.')
jsonfile.close()
conn.close()
<commit_msg>Print encrypted records for debugging.<commit_after>#! /usr/bin/env python3
import sqlite3
import bcrypt
import hashlib
from Crypto.Cipher import AES
import codecs
import json
from password import encrypt, decrypt, toHex, fromHex
pwdatabase = 'passwords.db'
jsonfile = open('passwords.json', mode='w')
password = input('Enter password: ')
conn = sqlite3.connect(pwdatabase)
pwHash, salt = conn.execute('select * from master_pass').fetchone()
if bcrypt.checkpw(password, pwHash):
print('Password is correct.')
aes_key = toHex(bcrypt.kdf(password, salt, 16, 32))
records = [list(i) for i in conn.execute('select * from passwords')]
print(records)
for i in range(len(records)):
records[i][3] = decrypt(aes_key, records[i][3]).decode()
records[i][4] = decrypt(aes_key, records[i][4]).decode()
json.dump(records,jsonfile)
else:
print('Incorrect password.')
jsonfile.close()
conn.close()
|
24f198ce9a5558627b004a38eb81ae91ae749116 | examples/connection.py | examples/connection.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Example Connection Command
Make sure you can authenticate before running this command.
For example:
python -m examples.connection
"""
import sys
import os_client_config
from examples import common
from openstack import connection
def make_connection(opts):
occ = os_client_config.OpenStackConfig()
cloud = occ.get_one_cloud(opts.cloud, opts)
auth = cloud.config['auth']
conn = connection.Connection(preference=opts.user_preferences, **auth)
return conn
def run_connection(opts):
conn = make_connection(opts)
print("Connection: %s" % conn)
for flavor in conn.compute.list_flavors():
print(flavor.id + " " + flavor.name)
return
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_connection))
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Example Connection Command
Make sure you can authenticate before running this command.
For example:
python -m examples.connection
"""
import sys
import os_client_config
from examples import common
from openstack import connection
def make_connection(opts):
occ = os_client_config.OpenStackConfig()
cloud = occ.get_one_cloud(opts.cloud, opts)
auth = cloud.config['auth']
conn = connection.Connection(preference=opts.user_preferences, **auth)
return conn
def run_connection(opts):
conn = make_connection(opts)
print("Connection: %s" % conn)
for flavor in conn.compute.flavors():
print(flavor.id + " " + flavor.name)
return
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_connection))
| Rename list_flavors flavors in example | Rename list_flavors flavors in example
Change-Id: Idf699774484a29fa9e9bb1bf654ed00d6ca9907d
| Python | apache-2.0 | dtroyer/python-openstacksdk,stackforge/python-openstacksdk,briancurtin/python-openstacksdk,mtougeron/python-openstacksdk,briancurtin/python-openstacksdk,dtroyer/python-openstacksdk,mtougeron/python-openstacksdk,dudymas/python-openstacksdk,dudymas/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,openstack/python-openstacksdk | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Example Connection Command
Make sure you can authenticate before running this command.
For example:
python -m examples.connection
"""
import sys
import os_client_config
from examples import common
from openstack import connection
def make_connection(opts):
occ = os_client_config.OpenStackConfig()
cloud = occ.get_one_cloud(opts.cloud, opts)
auth = cloud.config['auth']
conn = connection.Connection(preference=opts.user_preferences, **auth)
return conn
def run_connection(opts):
conn = make_connection(opts)
print("Connection: %s" % conn)
for flavor in conn.compute.list_flavors():
print(flavor.id + " " + flavor.name)
return
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_connection))
Rename list_flavors flavors in example
Change-Id: Idf699774484a29fa9e9bb1bf654ed00d6ca9907d | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Example Connection Command
Make sure you can authenticate before running this command.
For example:
python -m examples.connection
"""
import sys
import os_client_config
from examples import common
from openstack import connection
def make_connection(opts):
occ = os_client_config.OpenStackConfig()
cloud = occ.get_one_cloud(opts.cloud, opts)
auth = cloud.config['auth']
conn = connection.Connection(preference=opts.user_preferences, **auth)
return conn
def run_connection(opts):
conn = make_connection(opts)
print("Connection: %s" % conn)
for flavor in conn.compute.flavors():
print(flavor.id + " " + flavor.name)
return
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_connection))
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Example Connection Command
Make sure you can authenticate before running this command.
For example:
python -m examples.connection
"""
import sys
import os_client_config
from examples import common
from openstack import connection
def make_connection(opts):
occ = os_client_config.OpenStackConfig()
cloud = occ.get_one_cloud(opts.cloud, opts)
auth = cloud.config['auth']
conn = connection.Connection(preference=opts.user_preferences, **auth)
return conn
def run_connection(opts):
conn = make_connection(opts)
print("Connection: %s" % conn)
for flavor in conn.compute.list_flavors():
print(flavor.id + " " + flavor.name)
return
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_connection))
<commit_msg>Rename list_flavors flavors in example
Change-Id: Idf699774484a29fa9e9bb1bf654ed00d6ca9907d<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Example Connection Command
Make sure you can authenticate before running this command.
For example:
python -m examples.connection
"""
import sys
import os_client_config
from examples import common
from openstack import connection
def make_connection(opts):
occ = os_client_config.OpenStackConfig()
cloud = occ.get_one_cloud(opts.cloud, opts)
auth = cloud.config['auth']
conn = connection.Connection(preference=opts.user_preferences, **auth)
return conn
def run_connection(opts):
conn = make_connection(opts)
print("Connection: %s" % conn)
for flavor in conn.compute.flavors():
print(flavor.id + " " + flavor.name)
return
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_connection))
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Example Connection Command
Make sure you can authenticate before running this command.
For example:
python -m examples.connection
"""
import sys
import os_client_config
from examples import common
from openstack import connection
def make_connection(opts):
occ = os_client_config.OpenStackConfig()
cloud = occ.get_one_cloud(opts.cloud, opts)
auth = cloud.config['auth']
conn = connection.Connection(preference=opts.user_preferences, **auth)
return conn
def run_connection(opts):
conn = make_connection(opts)
print("Connection: %s" % conn)
for flavor in conn.compute.list_flavors():
print(flavor.id + " " + flavor.name)
return
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_connection))
Rename list_flavors flavors in example
Change-Id: Idf699774484a29fa9e9bb1bf654ed00d6ca9907d# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Example Connection Command
Make sure you can authenticate before running this command.
For example:
python -m examples.connection
"""
import sys
import os_client_config
from examples import common
from openstack import connection
def make_connection(opts):
occ = os_client_config.OpenStackConfig()
cloud = occ.get_one_cloud(opts.cloud, opts)
auth = cloud.config['auth']
conn = connection.Connection(preference=opts.user_preferences, **auth)
return conn
def run_connection(opts):
conn = make_connection(opts)
print("Connection: %s" % conn)
for flavor in conn.compute.flavors():
print(flavor.id + " " + flavor.name)
return
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_connection))
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Example Connection Command
Make sure you can authenticate before running this command.
For example:
python -m examples.connection
"""
import sys
import os_client_config
from examples import common
from openstack import connection
def make_connection(opts):
occ = os_client_config.OpenStackConfig()
cloud = occ.get_one_cloud(opts.cloud, opts)
auth = cloud.config['auth']
conn = connection.Connection(preference=opts.user_preferences, **auth)
return conn
def run_connection(opts):
conn = make_connection(opts)
print("Connection: %s" % conn)
for flavor in conn.compute.list_flavors():
print(flavor.id + " " + flavor.name)
return
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_connection))
<commit_msg>Rename list_flavors flavors in example
Change-Id: Idf699774484a29fa9e9bb1bf654ed00d6ca9907d<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Example Connection Command
Make sure you can authenticate before running this command.
For example:
python -m examples.connection
"""
import sys
import os_client_config
from examples import common
from openstack import connection
def make_connection(opts):
occ = os_client_config.OpenStackConfig()
cloud = occ.get_one_cloud(opts.cloud, opts)
auth = cloud.config['auth']
conn = connection.Connection(preference=opts.user_preferences, **auth)
return conn
def run_connection(opts):
conn = make_connection(opts)
print("Connection: %s" % conn)
for flavor in conn.compute.flavors():
print(flavor.id + " " + flavor.name)
return
if __name__ == "__main__":
opts = common.setup()
sys.exit(common.main(opts, run_connection))
|
b60f74bfa84d02d7a868905de12f16e715dfca98 | iacli/argparser.py | iacli/argparser.py | from collections import defaultdict
# get_args_dict()
#_________________________________________________________________________________________
def get_args_dict(args):
metadata = defaultdict(list)
for md in args:
key, value = md.split(':')
metadata[key].append(value)
# Flatten single item lists.
for key, value in metadata.items():
if len(value) <= 1:
metadata[key] = value[0]
return metadata
| from collections import defaultdict
# get_args_dict()
#_________________________________________________________________________________________
def get_args_dict(args):
metadata = defaultdict(list)
for md in args:
key, value = md.split(':', 1)
metadata[key].append(value)
# Flatten single item lists.
for key, value in metadata.items():
if len(value) <= 1:
metadata[key] = value[0]
return metadata
| Allow for ":" in argument values (only split args on first occurence of ":". | Allow for ":" in argument values (only split args on first occurence of ":".
| Python | agpl-3.0 | dattasaurabh82/internetarchive,wumpus/internetarchive,brycedrennan/internetarchive,JesseWeinstein/internetarchive,jjjake/internetarchive | from collections import defaultdict
# get_args_dict()
#_________________________________________________________________________________________
def get_args_dict(args):
metadata = defaultdict(list)
for md in args:
key, value = md.split(':')
metadata[key].append(value)
# Flatten single item lists.
for key, value in metadata.items():
if len(value) <= 1:
metadata[key] = value[0]
return metadata
Allow for ":" in argument values (only split args on first occurence of ":". | from collections import defaultdict
# get_args_dict()
#_________________________________________________________________________________________
def get_args_dict(args):
metadata = defaultdict(list)
for md in args:
key, value = md.split(':', 1)
metadata[key].append(value)
# Flatten single item lists.
for key, value in metadata.items():
if len(value) <= 1:
metadata[key] = value[0]
return metadata
| <commit_before>from collections import defaultdict
# get_args_dict()
#_________________________________________________________________________________________
def get_args_dict(args):
metadata = defaultdict(list)
for md in args:
key, value = md.split(':')
metadata[key].append(value)
# Flatten single item lists.
for key, value in metadata.items():
if len(value) <= 1:
metadata[key] = value[0]
return metadata
<commit_msg>Allow for ":" in argument values (only split args on first occurence of ":".<commit_after> | from collections import defaultdict
# get_args_dict()
#_________________________________________________________________________________________
def get_args_dict(args):
metadata = defaultdict(list)
for md in args:
key, value = md.split(':', 1)
metadata[key].append(value)
# Flatten single item lists.
for key, value in metadata.items():
if len(value) <= 1:
metadata[key] = value[0]
return metadata
| from collections import defaultdict
# get_args_dict()
#_________________________________________________________________________________________
def get_args_dict(args):
metadata = defaultdict(list)
for md in args:
key, value = md.split(':')
metadata[key].append(value)
# Flatten single item lists.
for key, value in metadata.items():
if len(value) <= 1:
metadata[key] = value[0]
return metadata
Allow for ":" in argument values (only split args on first occurence of ":".from collections import defaultdict
# get_args_dict()
#_________________________________________________________________________________________
def get_args_dict(args):
metadata = defaultdict(list)
for md in args:
key, value = md.split(':', 1)
metadata[key].append(value)
# Flatten single item lists.
for key, value in metadata.items():
if len(value) <= 1:
metadata[key] = value[0]
return metadata
| <commit_before>from collections import defaultdict
# get_args_dict()
#_________________________________________________________________________________________
def get_args_dict(args):
metadata = defaultdict(list)
for md in args:
key, value = md.split(':')
metadata[key].append(value)
# Flatten single item lists.
for key, value in metadata.items():
if len(value) <= 1:
metadata[key] = value[0]
return metadata
<commit_msg>Allow for ":" in argument values (only split args on first occurence of ":".<commit_after>from collections import defaultdict
# get_args_dict()
#_________________________________________________________________________________________
def get_args_dict(args):
metadata = defaultdict(list)
for md in args:
key, value = md.split(':', 1)
metadata[key].append(value)
# Flatten single item lists.
for key, value in metadata.items():
if len(value) <= 1:
metadata[key] = value[0]
return metadata
|
bd1ebf9dd9678c6c17826487f43e9c762b9bd1f0 | plumbium/recorders/csvfile.py | plumbium/recorders/csvfile.py | import os
import csv
class CSVFile(object):
def __init__(self, path, values):
self.path = path
self.values = values
def write(self, results):
field_names = self.values.keys()
write_header = not os.path.exists(self.path)
with open(self.path, 'a') as output_file:
writer = csv.DictWriter(output_file, fieldnames=field_names)
if write_header:
writer.writeheader()
row = {}
for field in self.values:
row[field]= self.values[field](results)
writer.writerow(row)
| import os
import csv
class CSVFile(object):
def __init__(self, path, values):
self.path = path
self.values = values
def write(self, results):
field_names = self.values.keys()
write_header = not os.path.exists(self.path)
with open(self.path, 'a') as output_file:
writer = csv.DictWriter(output_file, fieldnames=field_names)
if write_header:
writer.writeheader()
row = {}
for field in self.values:
row[field] = self.values[field](results)
writer.writerow(row)
| Fix a couple of PEP8 issues | Fix a couple of PEP8 issues
| Python | mit | jstutters/Plumbium | import os
import csv
class CSVFile(object):
def __init__(self, path, values):
self.path = path
self.values = values
def write(self, results):
field_names = self.values.keys()
write_header = not os.path.exists(self.path)
with open(self.path, 'a') as output_file:
writer = csv.DictWriter(output_file, fieldnames=field_names)
if write_header:
writer.writeheader()
row = {}
for field in self.values:
row[field]= self.values[field](results)
writer.writerow(row)
Fix a couple of PEP8 issues | import os
import csv
class CSVFile(object):
def __init__(self, path, values):
self.path = path
self.values = values
def write(self, results):
field_names = self.values.keys()
write_header = not os.path.exists(self.path)
with open(self.path, 'a') as output_file:
writer = csv.DictWriter(output_file, fieldnames=field_names)
if write_header:
writer.writeheader()
row = {}
for field in self.values:
row[field] = self.values[field](results)
writer.writerow(row)
| <commit_before>import os
import csv
class CSVFile(object):
def __init__(self, path, values):
self.path = path
self.values = values
def write(self, results):
field_names = self.values.keys()
write_header = not os.path.exists(self.path)
with open(self.path, 'a') as output_file:
writer = csv.DictWriter(output_file, fieldnames=field_names)
if write_header:
writer.writeheader()
row = {}
for field in self.values:
row[field]= self.values[field](results)
writer.writerow(row)
<commit_msg>Fix a couple of PEP8 issues<commit_after> | import os
import csv
class CSVFile(object):
def __init__(self, path, values):
self.path = path
self.values = values
def write(self, results):
field_names = self.values.keys()
write_header = not os.path.exists(self.path)
with open(self.path, 'a') as output_file:
writer = csv.DictWriter(output_file, fieldnames=field_names)
if write_header:
writer.writeheader()
row = {}
for field in self.values:
row[field] = self.values[field](results)
writer.writerow(row)
| import os
import csv
class CSVFile(object):
def __init__(self, path, values):
self.path = path
self.values = values
def write(self, results):
field_names = self.values.keys()
write_header = not os.path.exists(self.path)
with open(self.path, 'a') as output_file:
writer = csv.DictWriter(output_file, fieldnames=field_names)
if write_header:
writer.writeheader()
row = {}
for field in self.values:
row[field]= self.values[field](results)
writer.writerow(row)
Fix a couple of PEP8 issuesimport os
import csv
class CSVFile(object):
def __init__(self, path, values):
self.path = path
self.values = values
def write(self, results):
field_names = self.values.keys()
write_header = not os.path.exists(self.path)
with open(self.path, 'a') as output_file:
writer = csv.DictWriter(output_file, fieldnames=field_names)
if write_header:
writer.writeheader()
row = {}
for field in self.values:
row[field] = self.values[field](results)
writer.writerow(row)
| <commit_before>import os
import csv
class CSVFile(object):
def __init__(self, path, values):
self.path = path
self.values = values
def write(self, results):
field_names = self.values.keys()
write_header = not os.path.exists(self.path)
with open(self.path, 'a') as output_file:
writer = csv.DictWriter(output_file, fieldnames=field_names)
if write_header:
writer.writeheader()
row = {}
for field in self.values:
row[field]= self.values[field](results)
writer.writerow(row)
<commit_msg>Fix a couple of PEP8 issues<commit_after>import os
import csv
class CSVFile(object):
def __init__(self, path, values):
self.path = path
self.values = values
def write(self, results):
field_names = self.values.keys()
write_header = not os.path.exists(self.path)
with open(self.path, 'a') as output_file:
writer = csv.DictWriter(output_file, fieldnames=field_names)
if write_header:
writer.writeheader()
row = {}
for field in self.values:
row[field] = self.values[field](results)
writer.writerow(row)
|
7244f571d3c03cbf89d67a44c8c21b7ace893362 | mediacloud/mediawords/db/schema/test_version.py | mediacloud/mediawords/db/schema/test_version.py | from nose.tools import assert_raises
from mediawords.db.schema.version import *
def test_schema_version_from_lines():
assert_raises(SchemaVersionFromLinesException, schema_version_from_lines, 'no version')
# noinspection SqlDialectInspection,SqlNoDataSourceInspection
assert schema_version_from_lines("""
CREATE OR REPLACE FUNCTION set_database_schema_version() RETURNS boolean AS $$
DECLARE
-- Database schema version number (same as a SVN revision number)
-- Increase it by 1 if you make major database schema changes.
MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := 4588;
BEGIN
-- Update / set database schema version
DELETE FROM database_variables WHERE name = 'database-schema-version';
INSERT INTO database_variables (name, value) VALUES
('database-schema-version', MEDIACLOUD_DATABASE_SCHEMA_VERSION::int);
return true;
END;
$$
LANGUAGE 'plpgsql';
""") == 4588
| from nose.tools import assert_raises
from mediawords.db.schema.version import *
def test_schema_version_from_lines():
assert_raises(McSchemaVersionFromLinesException, schema_version_from_lines, 'no version')
# noinspection SqlDialectInspection,SqlNoDataSourceInspection
assert schema_version_from_lines("""
CREATE OR REPLACE FUNCTION set_database_schema_version() RETURNS boolean AS $$
DECLARE
-- Database schema version number (same as a SVN revision number)
-- Increase it by 1 if you make major database schema changes.
MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := 4588;
BEGIN
-- Update / set database schema version
DELETE FROM database_variables WHERE name = 'database-schema-version';
INSERT INTO database_variables (name, value) VALUES
('database-schema-version', MEDIACLOUD_DATABASE_SCHEMA_VERSION::int);
return true;
END;
$$
LANGUAGE 'plpgsql';
""") == 4588
| Prepend “Mc” to expected exception name | Prepend “Mc” to expected exception name
| Python | agpl-3.0 | berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud | from nose.tools import assert_raises
from mediawords.db.schema.version import *
def test_schema_version_from_lines():
assert_raises(SchemaVersionFromLinesException, schema_version_from_lines, 'no version')
# noinspection SqlDialectInspection,SqlNoDataSourceInspection
assert schema_version_from_lines("""
CREATE OR REPLACE FUNCTION set_database_schema_version() RETURNS boolean AS $$
DECLARE
-- Database schema version number (same as a SVN revision number)
-- Increase it by 1 if you make major database schema changes.
MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := 4588;
BEGIN
-- Update / set database schema version
DELETE FROM database_variables WHERE name = 'database-schema-version';
INSERT INTO database_variables (name, value) VALUES
('database-schema-version', MEDIACLOUD_DATABASE_SCHEMA_VERSION::int);
return true;
END;
$$
LANGUAGE 'plpgsql';
""") == 4588
Prepend “Mc” to expected exception name | from nose.tools import assert_raises
from mediawords.db.schema.version import *
def test_schema_version_from_lines():
assert_raises(McSchemaVersionFromLinesException, schema_version_from_lines, 'no version')
# noinspection SqlDialectInspection,SqlNoDataSourceInspection
assert schema_version_from_lines("""
CREATE OR REPLACE FUNCTION set_database_schema_version() RETURNS boolean AS $$
DECLARE
-- Database schema version number (same as a SVN revision number)
-- Increase it by 1 if you make major database schema changes.
MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := 4588;
BEGIN
-- Update / set database schema version
DELETE FROM database_variables WHERE name = 'database-schema-version';
INSERT INTO database_variables (name, value) VALUES
('database-schema-version', MEDIACLOUD_DATABASE_SCHEMA_VERSION::int);
return true;
END;
$$
LANGUAGE 'plpgsql';
""") == 4588
| <commit_before>from nose.tools import assert_raises
from mediawords.db.schema.version import *
def test_schema_version_from_lines():
assert_raises(SchemaVersionFromLinesException, schema_version_from_lines, 'no version')
# noinspection SqlDialectInspection,SqlNoDataSourceInspection
assert schema_version_from_lines("""
CREATE OR REPLACE FUNCTION set_database_schema_version() RETURNS boolean AS $$
DECLARE
-- Database schema version number (same as a SVN revision number)
-- Increase it by 1 if you make major database schema changes.
MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := 4588;
BEGIN
-- Update / set database schema version
DELETE FROM database_variables WHERE name = 'database-schema-version';
INSERT INTO database_variables (name, value) VALUES
('database-schema-version', MEDIACLOUD_DATABASE_SCHEMA_VERSION::int);
return true;
END;
$$
LANGUAGE 'plpgsql';
""") == 4588
<commit_msg>Prepend “Mc” to expected exception name<commit_after> | from nose.tools import assert_raises
from mediawords.db.schema.version import *
def test_schema_version_from_lines():
assert_raises(McSchemaVersionFromLinesException, schema_version_from_lines, 'no version')
# noinspection SqlDialectInspection,SqlNoDataSourceInspection
assert schema_version_from_lines("""
CREATE OR REPLACE FUNCTION set_database_schema_version() RETURNS boolean AS $$
DECLARE
-- Database schema version number (same as a SVN revision number)
-- Increase it by 1 if you make major database schema changes.
MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := 4588;
BEGIN
-- Update / set database schema version
DELETE FROM database_variables WHERE name = 'database-schema-version';
INSERT INTO database_variables (name, value) VALUES
('database-schema-version', MEDIACLOUD_DATABASE_SCHEMA_VERSION::int);
return true;
END;
$$
LANGUAGE 'plpgsql';
""") == 4588
| from nose.tools import assert_raises
from mediawords.db.schema.version import *
def test_schema_version_from_lines():
assert_raises(SchemaVersionFromLinesException, schema_version_from_lines, 'no version')
# noinspection SqlDialectInspection,SqlNoDataSourceInspection
assert schema_version_from_lines("""
CREATE OR REPLACE FUNCTION set_database_schema_version() RETURNS boolean AS $$
DECLARE
-- Database schema version number (same as a SVN revision number)
-- Increase it by 1 if you make major database schema changes.
MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := 4588;
BEGIN
-- Update / set database schema version
DELETE FROM database_variables WHERE name = 'database-schema-version';
INSERT INTO database_variables (name, value) VALUES
('database-schema-version', MEDIACLOUD_DATABASE_SCHEMA_VERSION::int);
return true;
END;
$$
LANGUAGE 'plpgsql';
""") == 4588
Prepend “Mc” to expected exception namefrom nose.tools import assert_raises
from mediawords.db.schema.version import *
def test_schema_version_from_lines():
assert_raises(McSchemaVersionFromLinesException, schema_version_from_lines, 'no version')
# noinspection SqlDialectInspection,SqlNoDataSourceInspection
assert schema_version_from_lines("""
CREATE OR REPLACE FUNCTION set_database_schema_version() RETURNS boolean AS $$
DECLARE
-- Database schema version number (same as a SVN revision number)
-- Increase it by 1 if you make major database schema changes.
MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := 4588;
BEGIN
-- Update / set database schema version
DELETE FROM database_variables WHERE name = 'database-schema-version';
INSERT INTO database_variables (name, value) VALUES
('database-schema-version', MEDIACLOUD_DATABASE_SCHEMA_VERSION::int);
return true;
END;
$$
LANGUAGE 'plpgsql';
""") == 4588
| <commit_before>from nose.tools import assert_raises
from mediawords.db.schema.version import *
def test_schema_version_from_lines():
assert_raises(SchemaVersionFromLinesException, schema_version_from_lines, 'no version')
# noinspection SqlDialectInspection,SqlNoDataSourceInspection
assert schema_version_from_lines("""
CREATE OR REPLACE FUNCTION set_database_schema_version() RETURNS boolean AS $$
DECLARE
-- Database schema version number (same as a SVN revision number)
-- Increase it by 1 if you make major database schema changes.
MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := 4588;
BEGIN
-- Update / set database schema version
DELETE FROM database_variables WHERE name = 'database-schema-version';
INSERT INTO database_variables (name, value) VALUES
('database-schema-version', MEDIACLOUD_DATABASE_SCHEMA_VERSION::int);
return true;
END;
$$
LANGUAGE 'plpgsql';
""") == 4588
<commit_msg>Prepend “Mc” to expected exception name<commit_after>from nose.tools import assert_raises
from mediawords.db.schema.version import *
def test_schema_version_from_lines():
assert_raises(McSchemaVersionFromLinesException, schema_version_from_lines, 'no version')
# noinspection SqlDialectInspection,SqlNoDataSourceInspection
assert schema_version_from_lines("""
CREATE OR REPLACE FUNCTION set_database_schema_version() RETURNS boolean AS $$
DECLARE
-- Database schema version number (same as a SVN revision number)
-- Increase it by 1 if you make major database schema changes.
MEDIACLOUD_DATABASE_SCHEMA_VERSION CONSTANT INT := 4588;
BEGIN
-- Update / set database schema version
DELETE FROM database_variables WHERE name = 'database-schema-version';
INSERT INTO database_variables (name, value) VALUES
('database-schema-version', MEDIACLOUD_DATABASE_SCHEMA_VERSION::int);
return true;
END;
$$
LANGUAGE 'plpgsql';
""") == 4588
|
dfdc59e0203aadb96984fb155acea34fdca2b548 | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Tomas Krehlik
# Copyright (c) 2014 Tomas Krehlik
#
# License: MIT
#
"""This module exports the Julialint plugin class."""
from SublimeLinter.lint import Linter, util
class Julialint(Linter):
"""Provides an interface to julialint."""
syntax = 'julia'
cmd = ['julia', '-e', 'using Lint; lintfile(ARGS[1])']
regex = r'(?P<file>^.*\.jl):(?P<line>\d{1,4}) \[(?P<func>.*)\] ((?P<error>ERROR)|(?P<warning>WARN)) (?P<message>.*)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = "jl"
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
comment_re = r'#'
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Tomas Krehlik
# Copyright (c) 2014 Tomas Krehlik
#
# License: MIT
#
"""This module exports the Julialint plugin class."""
from SublimeLinter.lint import Linter, util
class Julialint(Linter):
"""Provides an interface to julialint."""
syntax = 'julia'
cmd = ['julia', '-e', 'using Lint; lintfile(ARGS[1])']
regex = r'(?P<file>^.*\.jl):(?P<line>\d{1,4}) \[(?P<func>.*)\] ((?P<error>ERROR)|(?P<warning>WARN)) (?P<message>.*)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = "jl"
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
comment_re = r'#'
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None | Fix the travis issues 2 | Fix the travis issues 2
| Python | mit | tomaskrehlik/SublimeLinter-contrib-julialint,tomaskrehlik/SublimeLinter-contrib-julialint | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Tomas Krehlik
# Copyright (c) 2014 Tomas Krehlik
#
# License: MIT
#
"""This module exports the Julialint plugin class."""
from SublimeLinter.lint import Linter, util
class Julialint(Linter):
"""Provides an interface to julialint."""
syntax = 'julia'
cmd = ['julia', '-e', 'using Lint; lintfile(ARGS[1])']
regex = r'(?P<file>^.*\.jl):(?P<line>\d{1,4}) \[(?P<func>.*)\] ((?P<error>ERROR)|(?P<warning>WARN)) (?P<message>.*)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = "jl"
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
comment_re = r'#'
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = NoneFix the travis issues 2 | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Tomas Krehlik
# Copyright (c) 2014 Tomas Krehlik
#
# License: MIT
#
"""This module exports the Julialint plugin class."""
from SublimeLinter.lint import Linter, util
class Julialint(Linter):
"""Provides an interface to julialint."""
syntax = 'julia'
cmd = ['julia', '-e', 'using Lint; lintfile(ARGS[1])']
regex = r'(?P<file>^.*\.jl):(?P<line>\d{1,4}) \[(?P<func>.*)\] ((?P<error>ERROR)|(?P<warning>WARN)) (?P<message>.*)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = "jl"
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
comment_re = r'#'
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None | <commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Tomas Krehlik
# Copyright (c) 2014 Tomas Krehlik
#
# License: MIT
#
"""This module exports the Julialint plugin class."""
from SublimeLinter.lint import Linter, util
class Julialint(Linter):
"""Provides an interface to julialint."""
syntax = 'julia'
cmd = ['julia', '-e', 'using Lint; lintfile(ARGS[1])']
regex = r'(?P<file>^.*\.jl):(?P<line>\d{1,4}) \[(?P<func>.*)\] ((?P<error>ERROR)|(?P<warning>WARN)) (?P<message>.*)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = "jl"
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
comment_re = r'#'
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None<commit_msg>Fix the travis issues 2<commit_after> | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Tomas Krehlik
# Copyright (c) 2014 Tomas Krehlik
#
# License: MIT
#
"""This module exports the Julialint plugin class."""
from SublimeLinter.lint import Linter, util
class Julialint(Linter):
"""Provides an interface to julialint."""
syntax = 'julia'
cmd = ['julia', '-e', 'using Lint; lintfile(ARGS[1])']
regex = r'(?P<file>^.*\.jl):(?P<line>\d{1,4}) \[(?P<func>.*)\] ((?P<error>ERROR)|(?P<warning>WARN)) (?P<message>.*)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = "jl"
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
comment_re = r'#'
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Tomas Krehlik
# Copyright (c) 2014 Tomas Krehlik
#
# License: MIT
#
"""This module exports the Julialint plugin class."""
from SublimeLinter.lint import Linter, util
class Julialint(Linter):
"""Provides an interface to julialint."""
syntax = 'julia'
cmd = ['julia', '-e', 'using Lint; lintfile(ARGS[1])']
regex = r'(?P<file>^.*\.jl):(?P<line>\d{1,4}) \[(?P<func>.*)\] ((?P<error>ERROR)|(?P<warning>WARN)) (?P<message>.*)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = "jl"
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
comment_re = r'#'
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = NoneFix the travis issues 2#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Tomas Krehlik
# Copyright (c) 2014 Tomas Krehlik
#
# License: MIT
#
"""This module exports the Julialint plugin class."""
from SublimeLinter.lint import Linter, util
class Julialint(Linter):
"""Provides an interface to julialint."""
syntax = 'julia'
cmd = ['julia', '-e', 'using Lint; lintfile(ARGS[1])']
regex = r'(?P<file>^.*\.jl):(?P<line>\d{1,4}) \[(?P<func>.*)\] ((?P<error>ERROR)|(?P<warning>WARN)) (?P<message>.*)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = "jl"
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
comment_re = r'#'
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None | <commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Tomas Krehlik
# Copyright (c) 2014 Tomas Krehlik
#
# License: MIT
#
"""This module exports the Julialint plugin class."""
from SublimeLinter.lint import Linter, util
class Julialint(Linter):
"""Provides an interface to julialint."""
syntax = 'julia'
cmd = ['julia', '-e', 'using Lint; lintfile(ARGS[1])']
regex = r'(?P<file>^.*\.jl):(?P<line>\d{1,4}) \[(?P<func>.*)\] ((?P<error>ERROR)|(?P<warning>WARN)) (?P<message>.*)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = "jl"
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
comment_re = r'#'
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None<commit_msg>Fix the travis issues 2<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Tomas Krehlik
# Copyright (c) 2014 Tomas Krehlik
#
# License: MIT
#
"""This module exports the Julialint plugin class."""
from SublimeLinter.lint import Linter, util
class Julialint(Linter):
"""Provides an interface to julialint."""
syntax = 'julia'
cmd = ['julia', '-e', 'using Lint; lintfile(ARGS[1])']
regex = r'(?P<file>^.*\.jl):(?P<line>\d{1,4}) \[(?P<func>.*)\] ((?P<error>ERROR)|(?P<warning>WARN)) (?P<message>.*)'
multiline = False
line_col_base = (1, 1)
tempfile_suffix = "jl"
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
comment_re = r'#'
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None |
6663f7baefd68a059c963d464afaf3fcbfbdf2db | tests/markdown/MarkdownBearTest.py | tests/markdown/MarkdownBearTest.py | from bears.markdown.MarkdownBear import MarkdownBear
from coalib.testing.LocalBearTestHelper import verify_local_bear
test_file1 = """1. abc
1. def
"""
test_file2 = """1. abc
2. def
"""
test_file3 = """1. abcdefghijklm
2. nopqrstuvwxyz
"""
MarkdownBearTest = verify_local_bear(MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file1,))
MarkdownBearConfigsTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file1,),
invalid_files=(test_file2,),
settings={'list_increment': False})
MarkdownBearMaxLineLengthSettingTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file3,),
settings={'max_line_length': 10})
| import unittest
from queue import Queue
from bears.markdown.MarkdownBear import MarkdownBear
from coalib.testing.BearTestHelper import generate_skip_decorator
from coalib.testing.LocalBearTestHelper import verify_local_bear, execute_bear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.settings.Section import Section
from coalib.settings.Setting import Setting
from coala_utils.ContextManagers import prepare_file
test_file1 = """1. abc
1. def
"""
test_file2 = """1. abc
2. def
"""
test_file3 = """1. abcdefghijklm
2. nopqrstuvwxyz
"""
MarkdownBearTest = verify_local_bear(MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file1,))
MarkdownBearConfigsTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file1,),
invalid_files=(test_file2,),
settings={'list_increment': False})
MarkdownBearMaxLineLengthSettingTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file3,),
settings={'max_line_length': 10})
@generate_skip_decorator(MarkdownBear)
class MarkdownBearMaxLineLengthMessageTest(unittest.TestCase):
def setUp(self):
self.section = Section('name')
self.uut = MarkdownBear(self.section, Queue())
def test_invalid_message(self):
content = test_file3.splitlines()
self.section.append(Setting('max_line_length', '10'))
with prepare_file(content, None) as (file, fname):
with execute_bear(self.uut, fname, file) as results:
self.assertEqual(results[0].message,
'Line must be at most 10 characters'
' maximum-line-length remark-lint')
self.assertEqual(results[0].severity, RESULT_SEVERITY.NORMAL)
| Add test to check message for error | MarkdownBear: Add test to check message for error
A better test for MarkdownBear to check the exact message
of the result for a maximum line length error.
Related to https://github.com/coala/coala-bears/issues/1235
| Python | agpl-3.0 | Asnelchristian/coala-bears,damngamerz/coala-bears,aptrishu/coala-bears,srisankethu/coala-bears,incorrectusername/coala-bears,yash-nisar/coala-bears,Shade5/coala-bears,madhukar01/coala-bears,naveentata/coala-bears,shreyans800755/coala-bears,damngamerz/coala-bears,Vamshi99/coala-bears,shreyans800755/coala-bears,horczech/coala-bears,Shade5/coala-bears,madhukar01/coala-bears,coala-analyzer/coala-bears,damngamerz/coala-bears,ankit01ojha/coala-bears,damngamerz/coala-bears,refeed/coala-bears,gs0510/coala-bears,Asnelchristian/coala-bears,damngamerz/coala-bears,seblat/coala-bears,refeed/coala-bears,Asnelchristian/coala-bears,shreyans800755/coala-bears,yash-nisar/coala-bears,kaustubhhiware/coala-bears,coala/coala-bears,coala/coala-bears,naveentata/coala-bears,coala/coala-bears,srisankethu/coala-bears,yashtrivedi96/coala-bears,horczech/coala-bears,yashtrivedi96/coala-bears,refeed/coala-bears,Vamshi99/coala-bears,aptrishu/coala-bears,Asnelchristian/coala-bears,ankit01ojha/coala-bears,gs0510/coala-bears,refeed/coala-bears,seblat/coala-bears,horczech/coala-bears,incorrectusername/coala-bears,ankit01ojha/coala-bears,incorrectusername/coala-bears,srisankethu/coala-bears,arjunsinghy96/coala-bears,arjunsinghy96/coala-bears,Asnelchristian/coala-bears,seblat/coala-bears,yash-nisar/coala-bears,kaustubhhiware/coala-bears,arjunsinghy96/coala-bears,ankit01ojha/coala-bears,Asnelchristian/coala-bears,incorrectusername/coala-bears,srisankethu/coala-bears,srisankethu/coala-bears,coala/coala-bears,Vamshi99/coala-bears,kaustubhhiware/coala-bears,srisankethu/coala-bears,coala/coala-bears,yashtrivedi96/coala-bears,meetmangukiya/coala-bears,yashtrivedi96/coala-bears,yash-nisar/coala-bears,shreyans800755/coala-bears,seblat/coala-bears,Vamshi99/coala-bears,yashtrivedi96/coala-bears,coala/coala-bears,refeed/coala-bears,shreyans800755/coala-bears,meetmangukiya/coala-bears,arjunsinghy96/coala-bears,refeed/coala-bears,horczech/coala-bears,srisankethu/coala-bears,kaustubhhiware/coala
-bears,coala-analyzer/coala-bears,arjunsinghy96/coala-bears,gs0510/coala-bears,shreyans800755/coala-bears,refeed/coala-bears,shreyans800755/coala-bears,yash-nisar/coala-bears,Shade5/coala-bears,Vamshi99/coala-bears,damngamerz/coala-bears,damngamerz/coala-bears,srisankethu/coala-bears,seblat/coala-bears,Shade5/coala-bears,ankit01ojha/coala-bears,aptrishu/coala-bears,coala-analyzer/coala-bears,ankit01ojha/coala-bears,coala/coala-bears,incorrectusername/coala-bears,Shade5/coala-bears,seblat/coala-bears,shreyans800755/coala-bears,coala-analyzer/coala-bears,srisankethu/coala-bears,ankit01ojha/coala-bears,Vamshi99/coala-bears,yashtrivedi96/coala-bears,incorrectusername/coala-bears,gs0510/coala-bears,incorrectusername/coala-bears,coala/coala-bears,meetmangukiya/coala-bears,horczech/coala-bears,coala-analyzer/coala-bears,incorrectusername/coala-bears,Shade5/coala-bears,srisankethu/coala-bears,horczech/coala-bears,gs0510/coala-bears,damngamerz/coala-bears,yash-nisar/coala-bears,aptrishu/coala-bears,meetmangukiya/coala-bears,seblat/coala-bears,gs0510/coala-bears,aptrishu/coala-bears,arjunsinghy96/coala-bears,Shade5/coala-bears,madhukar01/coala-bears,yash-nisar/coala-bears,horczech/coala-bears,madhukar01/coala-bears,Asnelchristian/coala-bears,Asnelchristian/coala-bears,naveentata/coala-bears,ankit01ojha/coala-bears,Shade5/coala-bears,ankit01ojha/coala-bears,arjunsinghy96/coala-bears,refeed/coala-bears,shreyans800755/coala-bears,gs0510/coala-bears,naveentata/coala-bears,horczech/coala-bears,aptrishu/coala-bears,meetmangukiya/coala-bears,aptrishu/coala-bears,yashtrivedi96/coala-bears,yash-nisar/coala-bears,yashtrivedi96/coala-bears,refeed/coala-bears,gs0510/coala-bears,refeed/coala-bears,aptrishu/coala-bears,incorrectusername/coala-bears,naveentata/coala-bears,Vamshi99/coala-bears,Shade5/coala-bears,naveentata/coala-bears,horczech/coala-bears,coala-analyzer/coala-bears,damngamerz/coala-bears,damngamerz/coala-bears,horczech/coala-bears,Vamshi99/coala-bears,coala-analyzer/coala-be
ars,srisankethu/coala-bears,kaustubhhiware/coala-bears,madhukar01/coala-bears,kaustubhhiware/coala-bears,yash-nisar/coala-bears,naveentata/coala-bears,coala/coala-bears,Vamshi99/coala-bears,refeed/coala-bears,coala/coala-bears,horczech/coala-bears,kaustubhhiware/coala-bears,meetmangukiya/coala-bears,naveentata/coala-bears,gs0510/coala-bears,shreyans800755/coala-bears,aptrishu/coala-bears,coala-analyzer/coala-bears,ankit01ojha/coala-bears,shreyans800755/coala-bears,yashtrivedi96/coala-bears,ankit01ojha/coala-bears,Vamshi99/coala-bears,kaustubhhiware/coala-bears,madhukar01/coala-bears,naveentata/coala-bears,kaustubhhiware/coala-bears,meetmangukiya/coala-bears,coala/coala-bears,madhukar01/coala-bears,coala/coala-bears,arjunsinghy96/coala-bears,damngamerz/coala-bears,coala-analyzer/coala-bears,madhukar01/coala-bears,Vamshi99/coala-bears,meetmangukiya/coala-bears,yash-nisar/coala-bears,aptrishu/coala-bears,Asnelchristian/coala-bears,aptrishu/coala-bears,meetmangukiya/coala-bears,seblat/coala-bears,arjunsinghy96/coala-bears,yash-nisar/coala-bears,madhukar01/coala-bears | from bears.markdown.MarkdownBear import MarkdownBear
from coalib.testing.LocalBearTestHelper import verify_local_bear
test_file1 = """1. abc
1. def
"""
test_file2 = """1. abc
2. def
"""
test_file3 = """1. abcdefghijklm
2. nopqrstuvwxyz
"""
MarkdownBearTest = verify_local_bear(MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file1,))
MarkdownBearConfigsTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file1,),
invalid_files=(test_file2,),
settings={'list_increment': False})
MarkdownBearMaxLineLengthSettingTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file3,),
settings={'max_line_length': 10})
MarkdownBear: Add test to check message for error
A better test for MarkdownBear to check the exact message
of the result for a maximum line length error.
Related to https://github.com/coala/coala-bears/issues/1235 | import unittest
from queue import Queue
from bears.markdown.MarkdownBear import MarkdownBear
from coalib.testing.BearTestHelper import generate_skip_decorator
from coalib.testing.LocalBearTestHelper import verify_local_bear, execute_bear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.settings.Section import Section
from coalib.settings.Setting import Setting
from coala_utils.ContextManagers import prepare_file
test_file1 = """1. abc
1. def
"""
test_file2 = """1. abc
2. def
"""
test_file3 = """1. abcdefghijklm
2. nopqrstuvwxyz
"""
MarkdownBearTest = verify_local_bear(MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file1,))
MarkdownBearConfigsTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file1,),
invalid_files=(test_file2,),
settings={'list_increment': False})
MarkdownBearMaxLineLengthSettingTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file3,),
settings={'max_line_length': 10})
@generate_skip_decorator(MarkdownBear)
class MarkdownBearMaxLineLengthMessageTest(unittest.TestCase):
def setUp(self):
self.section = Section('name')
self.uut = MarkdownBear(self.section, Queue())
def test_invalid_message(self):
content = test_file3.splitlines()
self.section.append(Setting('max_line_length', '10'))
with prepare_file(content, None) as (file, fname):
with execute_bear(self.uut, fname, file) as results:
self.assertEqual(results[0].message,
'Line must be at most 10 characters'
' maximum-line-length remark-lint')
self.assertEqual(results[0].severity, RESULT_SEVERITY.NORMAL)
| <commit_before>from bears.markdown.MarkdownBear import MarkdownBear
from coalib.testing.LocalBearTestHelper import verify_local_bear
test_file1 = """1. abc
1. def
"""
test_file2 = """1. abc
2. def
"""
test_file3 = """1. abcdefghijklm
2. nopqrstuvwxyz
"""
MarkdownBearTest = verify_local_bear(MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file1,))
MarkdownBearConfigsTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file1,),
invalid_files=(test_file2,),
settings={'list_increment': False})
MarkdownBearMaxLineLengthSettingTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file3,),
settings={'max_line_length': 10})
<commit_msg>MarkdownBear: Add test to check message for error
A better test for MarkdownBear to check the exact message
of the result for a maximum line length error.
Related to https://github.com/coala/coala-bears/issues/1235<commit_after> | import unittest
from queue import Queue
from bears.markdown.MarkdownBear import MarkdownBear
from coalib.testing.BearTestHelper import generate_skip_decorator
from coalib.testing.LocalBearTestHelper import verify_local_bear, execute_bear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.settings.Section import Section
from coalib.settings.Setting import Setting
from coala_utils.ContextManagers import prepare_file
test_file1 = """1. abc
1. def
"""
test_file2 = """1. abc
2. def
"""
test_file3 = """1. abcdefghijklm
2. nopqrstuvwxyz
"""
MarkdownBearTest = verify_local_bear(MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file1,))
MarkdownBearConfigsTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file1,),
invalid_files=(test_file2,),
settings={'list_increment': False})
MarkdownBearMaxLineLengthSettingTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file3,),
settings={'max_line_length': 10})
@generate_skip_decorator(MarkdownBear)
class MarkdownBearMaxLineLengthMessageTest(unittest.TestCase):
def setUp(self):
self.section = Section('name')
self.uut = MarkdownBear(self.section, Queue())
def test_invalid_message(self):
content = test_file3.splitlines()
self.section.append(Setting('max_line_length', '10'))
with prepare_file(content, None) as (file, fname):
with execute_bear(self.uut, fname, file) as results:
self.assertEqual(results[0].message,
'Line must be at most 10 characters'
' maximum-line-length remark-lint')
self.assertEqual(results[0].severity, RESULT_SEVERITY.NORMAL)
| from bears.markdown.MarkdownBear import MarkdownBear
from coalib.testing.LocalBearTestHelper import verify_local_bear
test_file1 = """1. abc
1. def
"""
test_file2 = """1. abc
2. def
"""
test_file3 = """1. abcdefghijklm
2. nopqrstuvwxyz
"""
MarkdownBearTest = verify_local_bear(MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file1,))
MarkdownBearConfigsTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file1,),
invalid_files=(test_file2,),
settings={'list_increment': False})
MarkdownBearMaxLineLengthSettingTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file3,),
settings={'max_line_length': 10})
MarkdownBear: Add test to check message for error
A better test for MarkdownBear to check the exact message
of the result for a maximum line length error.
Related to https://github.com/coala/coala-bears/issues/1235import unittest
from queue import Queue
from bears.markdown.MarkdownBear import MarkdownBear
from coalib.testing.BearTestHelper import generate_skip_decorator
from coalib.testing.LocalBearTestHelper import verify_local_bear, execute_bear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.settings.Section import Section
from coalib.settings.Setting import Setting
from coala_utils.ContextManagers import prepare_file
test_file1 = """1. abc
1. def
"""
test_file2 = """1. abc
2. def
"""
test_file3 = """1. abcdefghijklm
2. nopqrstuvwxyz
"""
MarkdownBearTest = verify_local_bear(MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file1,))
MarkdownBearConfigsTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file1,),
invalid_files=(test_file2,),
settings={'list_increment': False})
MarkdownBearMaxLineLengthSettingTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file3,),
settings={'max_line_length': 10})
@generate_skip_decorator(MarkdownBear)
class MarkdownBearMaxLineLengthMessageTest(unittest.TestCase):
def setUp(self):
self.section = Section('name')
self.uut = MarkdownBear(self.section, Queue())
def test_invalid_message(self):
content = test_file3.splitlines()
self.section.append(Setting('max_line_length', '10'))
with prepare_file(content, None) as (file, fname):
with execute_bear(self.uut, fname, file) as results:
self.assertEqual(results[0].message,
'Line must be at most 10 characters'
' maximum-line-length remark-lint')
self.assertEqual(results[0].severity, RESULT_SEVERITY.NORMAL)
| <commit_before>from bears.markdown.MarkdownBear import MarkdownBear
from coalib.testing.LocalBearTestHelper import verify_local_bear
test_file1 = """1. abc
1. def
"""
test_file2 = """1. abc
2. def
"""
test_file3 = """1. abcdefghijklm
2. nopqrstuvwxyz
"""
MarkdownBearTest = verify_local_bear(MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file1,))
MarkdownBearConfigsTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file1,),
invalid_files=(test_file2,),
settings={'list_increment': False})
MarkdownBearMaxLineLengthSettingTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file3,),
settings={'max_line_length': 10})
<commit_msg>MarkdownBear: Add test to check message for error
A better test for MarkdownBear to check the exact message
of the result for a maximum line length error.
Related to https://github.com/coala/coala-bears/issues/1235<commit_after>import unittest
from queue import Queue
from bears.markdown.MarkdownBear import MarkdownBear
from coalib.testing.BearTestHelper import generate_skip_decorator
from coalib.testing.LocalBearTestHelper import verify_local_bear, execute_bear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
from coalib.settings.Section import Section
from coalib.settings.Setting import Setting
from coala_utils.ContextManagers import prepare_file
test_file1 = """1. abc
1. def
"""
test_file2 = """1. abc
2. def
"""
test_file3 = """1. abcdefghijklm
2. nopqrstuvwxyz
"""
MarkdownBearTest = verify_local_bear(MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file1,))
MarkdownBearConfigsTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file1,),
invalid_files=(test_file2,),
settings={'list_increment': False})
MarkdownBearMaxLineLengthSettingTest = verify_local_bear(
MarkdownBear,
valid_files=(test_file2,),
invalid_files=(test_file3,),
settings={'max_line_length': 10})
@generate_skip_decorator(MarkdownBear)
class MarkdownBearMaxLineLengthMessageTest(unittest.TestCase):
def setUp(self):
self.section = Section('name')
self.uut = MarkdownBear(self.section, Queue())
def test_invalid_message(self):
content = test_file3.splitlines()
self.section.append(Setting('max_line_length', '10'))
with prepare_file(content, None) as (file, fname):
with execute_bear(self.uut, fname, file) as results:
self.assertEqual(results[0].message,
'Line must be at most 10 characters'
' maximum-line-length remark-lint')
self.assertEqual(results[0].severity, RESULT_SEVERITY.NORMAL)
|
2b4ab7d50ae200afd47310c88f1e59977ef8f1e3 | flask_nav/renderers.py | flask_nav/renderers.py | from flask import current_app
from dominate import tags
from visitor import Visitor
class BaseRenderer(Visitor):
def visit_object(self, node):
if current_app.debug:
return '<!-- no implementation in {} to render {} -->'.format(
self.__class__.__name__, node.__class__.__name__,
)
return ''
class SimpleRenderer(BaseRenderer):
def visit_Link(self, node):
return tags.a(node.title, title=node.title, **node.attribs)
def visit_Navbar(self, node):
cont = tags.nav(_class='navbar')
for item in node.items:
cont.add(tags.li(self.visit(item)))
return cont
def visit_View(self, node):
kwargs = {}
if node.active:
kwargs['_class'] = 'active'
return tags.a(node.title,
href=node.get_url(),
title=node.title,
**kwargs)
def visit_Subgroup(self, node):
group = tags.ul()
for item in node.items:
group.add(tags.li(self.visit(item)))
return group
def visit_Separator(self, node):
return tags.span(_class='separator')
def visit_Label(self, node):
return tags.span(node.title, _class='nav-label')
| from flask import current_app
from dominate import tags
from visitor import Visitor
class BaseRenderer(Visitor):
def visit_object(self, node):
if current_app.debug:
return tags.comment(
'no implementation in {} to render {}'.format(
self.__class__.__name__, node.__class__.__name__,
))
return ''
class SimpleRenderer(BaseRenderer):
def visit_Link(self, node):
return tags.a(node.title, title=node.title, **node.attribs)
def visit_Navbar(self, node):
cont = tags.nav(_class='navbar')
for item in node.items:
cont.add(tags.li(self.visit(item)))
return cont
def visit_View(self, node):
kwargs = {}
if node.active:
kwargs['_class'] = 'active'
return tags.a(node.title,
href=node.get_url(),
title=node.title,
**kwargs)
def visit_Subgroup(self, node):
group = tags.ul()
for item in node.items:
group.add(tags.li(self.visit(item)))
return group
def visit_Separator(self, node):
return tags.span(_class='separator')
def visit_Label(self, node):
return tags.span(node.title, _class='nav-label')
| Use dominate to render comments as well. | Use dominate to render comments as well.
| Python | mit | mbr/flask-nav,mbr/flask-nav | from flask import current_app
from dominate import tags
from visitor import Visitor
class BaseRenderer(Visitor):
def visit_object(self, node):
if current_app.debug:
return '<!-- no implementation in {} to render {} -->'.format(
self.__class__.__name__, node.__class__.__name__,
)
return ''
class SimpleRenderer(BaseRenderer):
def visit_Link(self, node):
return tags.a(node.title, title=node.title, **node.attribs)
def visit_Navbar(self, node):
cont = tags.nav(_class='navbar')
for item in node.items:
cont.add(tags.li(self.visit(item)))
return cont
def visit_View(self, node):
kwargs = {}
if node.active:
kwargs['_class'] = 'active'
return tags.a(node.title,
href=node.get_url(),
title=node.title,
**kwargs)
def visit_Subgroup(self, node):
group = tags.ul()
for item in node.items:
group.add(tags.li(self.visit(item)))
return group
def visit_Separator(self, node):
return tags.span(_class='separator')
def visit_Label(self, node):
return tags.span(node.title, _class='nav-label')
Use dominate to render comments as well. | from flask import current_app
from dominate import tags
from visitor import Visitor
class BaseRenderer(Visitor):
def visit_object(self, node):
if current_app.debug:
return tags.comment(
'no implementation in {} to render {}'.format(
self.__class__.__name__, node.__class__.__name__,
))
return ''
class SimpleRenderer(BaseRenderer):
def visit_Link(self, node):
return tags.a(node.title, title=node.title, **node.attribs)
def visit_Navbar(self, node):
cont = tags.nav(_class='navbar')
for item in node.items:
cont.add(tags.li(self.visit(item)))
return cont
def visit_View(self, node):
kwargs = {}
if node.active:
kwargs['_class'] = 'active'
return tags.a(node.title,
href=node.get_url(),
title=node.title,
**kwargs)
def visit_Subgroup(self, node):
group = tags.ul()
for item in node.items:
group.add(tags.li(self.visit(item)))
return group
def visit_Separator(self, node):
return tags.span(_class='separator')
def visit_Label(self, node):
return tags.span(node.title, _class='nav-label')
| <commit_before>from flask import current_app
from dominate import tags
from visitor import Visitor
class BaseRenderer(Visitor):
def visit_object(self, node):
if current_app.debug:
return '<!-- no implementation in {} to render {} -->'.format(
self.__class__.__name__, node.__class__.__name__,
)
return ''
class SimpleRenderer(BaseRenderer):
def visit_Link(self, node):
return tags.a(node.title, title=node.title, **node.attribs)
def visit_Navbar(self, node):
cont = tags.nav(_class='navbar')
for item in node.items:
cont.add(tags.li(self.visit(item)))
return cont
def visit_View(self, node):
kwargs = {}
if node.active:
kwargs['_class'] = 'active'
return tags.a(node.title,
href=node.get_url(),
title=node.title,
**kwargs)
def visit_Subgroup(self, node):
group = tags.ul()
for item in node.items:
group.add(tags.li(self.visit(item)))
return group
def visit_Separator(self, node):
return tags.span(_class='separator')
def visit_Label(self, node):
return tags.span(node.title, _class='nav-label')
<commit_msg>Use dominate to render comments as well.<commit_after> | from flask import current_app
from dominate import tags
from visitor import Visitor
class BaseRenderer(Visitor):
def visit_object(self, node):
if current_app.debug:
return tags.comment(
'no implementation in {} to render {}'.format(
self.__class__.__name__, node.__class__.__name__,
))
return ''
class SimpleRenderer(BaseRenderer):
def visit_Link(self, node):
return tags.a(node.title, title=node.title, **node.attribs)
def visit_Navbar(self, node):
cont = tags.nav(_class='navbar')
for item in node.items:
cont.add(tags.li(self.visit(item)))
return cont
def visit_View(self, node):
kwargs = {}
if node.active:
kwargs['_class'] = 'active'
return tags.a(node.title,
href=node.get_url(),
title=node.title,
**kwargs)
def visit_Subgroup(self, node):
group = tags.ul()
for item in node.items:
group.add(tags.li(self.visit(item)))
return group
def visit_Separator(self, node):
return tags.span(_class='separator')
def visit_Label(self, node):
return tags.span(node.title, _class='nav-label')
| from flask import current_app
from dominate import tags
from visitor import Visitor
class BaseRenderer(Visitor):
def visit_object(self, node):
if current_app.debug:
return '<!-- no implementation in {} to render {} -->'.format(
self.__class__.__name__, node.__class__.__name__,
)
return ''
class SimpleRenderer(BaseRenderer):
def visit_Link(self, node):
return tags.a(node.title, title=node.title, **node.attribs)
def visit_Navbar(self, node):
cont = tags.nav(_class='navbar')
for item in node.items:
cont.add(tags.li(self.visit(item)))
return cont
def visit_View(self, node):
kwargs = {}
if node.active:
kwargs['_class'] = 'active'
return tags.a(node.title,
href=node.get_url(),
title=node.title,
**kwargs)
def visit_Subgroup(self, node):
group = tags.ul()
for item in node.items:
group.add(tags.li(self.visit(item)))
return group
def visit_Separator(self, node):
return tags.span(_class='separator')
def visit_Label(self, node):
return tags.span(node.title, _class='nav-label')
Use dominate to render comments as well.from flask import current_app
from dominate import tags
from visitor import Visitor
class BaseRenderer(Visitor):
def visit_object(self, node):
if current_app.debug:
return tags.comment(
'no implementation in {} to render {}'.format(
self.__class__.__name__, node.__class__.__name__,
))
return ''
class SimpleRenderer(BaseRenderer):
def visit_Link(self, node):
return tags.a(node.title, title=node.title, **node.attribs)
def visit_Navbar(self, node):
cont = tags.nav(_class='navbar')
for item in node.items:
cont.add(tags.li(self.visit(item)))
return cont
def visit_View(self, node):
kwargs = {}
if node.active:
kwargs['_class'] = 'active'
return tags.a(node.title,
href=node.get_url(),
title=node.title,
**kwargs)
def visit_Subgroup(self, node):
group = tags.ul()
for item in node.items:
group.add(tags.li(self.visit(item)))
return group
def visit_Separator(self, node):
return tags.span(_class='separator')
def visit_Label(self, node):
return tags.span(node.title, _class='nav-label')
| <commit_before>from flask import current_app
from dominate import tags
from visitor import Visitor
class BaseRenderer(Visitor):
def visit_object(self, node):
if current_app.debug:
return '<!-- no implementation in {} to render {} -->'.format(
self.__class__.__name__, node.__class__.__name__,
)
return ''
class SimpleRenderer(BaseRenderer):
def visit_Link(self, node):
return tags.a(node.title, title=node.title, **node.attribs)
def visit_Navbar(self, node):
cont = tags.nav(_class='navbar')
for item in node.items:
cont.add(tags.li(self.visit(item)))
return cont
def visit_View(self, node):
kwargs = {}
if node.active:
kwargs['_class'] = 'active'
return tags.a(node.title,
href=node.get_url(),
title=node.title,
**kwargs)
def visit_Subgroup(self, node):
group = tags.ul()
for item in node.items:
group.add(tags.li(self.visit(item)))
return group
def visit_Separator(self, node):
return tags.span(_class='separator')
def visit_Label(self, node):
return tags.span(node.title, _class='nav-label')
<commit_msg>Use dominate to render comments as well.<commit_after>from flask import current_app
from dominate import tags
from visitor import Visitor
class BaseRenderer(Visitor):
def visit_object(self, node):
if current_app.debug:
return tags.comment(
'no implementation in {} to render {}'.format(
self.__class__.__name__, node.__class__.__name__,
))
return ''
class SimpleRenderer(BaseRenderer):
def visit_Link(self, node):
return tags.a(node.title, title=node.title, **node.attribs)
def visit_Navbar(self, node):
cont = tags.nav(_class='navbar')
for item in node.items:
cont.add(tags.li(self.visit(item)))
return cont
def visit_View(self, node):
kwargs = {}
if node.active:
kwargs['_class'] = 'active'
return tags.a(node.title,
href=node.get_url(),
title=node.title,
**kwargs)
def visit_Subgroup(self, node):
group = tags.ul()
for item in node.items:
group.add(tags.li(self.visit(item)))
return group
def visit_Separator(self, node):
return tags.span(_class='separator')
def visit_Label(self, node):
return tags.span(node.title, _class='nav-label')
|
deadcc641c0ff544e8074ad79808d3ce292892a3 | open_folder.py | open_folder.py | import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
def open_folder(path):
path = os.path.normpath(path)
format_string = {'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
os.popen(format_string % path)
| import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
try:
platform_cmd_formatstring = { 'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
platform_cmd = platform_cmd_formatstring % path
os.popen(platform_cmd)
| Raise exception if we don't know how to handle users operating system | Raise exception if we don't know how to handle users operating system | Python | mit | golliher/dg-tickler-file | import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
def open_folder(path):
path = os.path.normpath(path)
format_string = {'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
os.popen(format_string % path)
Raise exception if we don't know how to handle users operating system | import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
try:
platform_cmd_formatstring = { 'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
platform_cmd = platform_cmd_formatstring % path
os.popen(platform_cmd)
| <commit_before>import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
def open_folder(path):
path = os.path.normpath(path)
format_string = {'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
os.popen(format_string % path)
<commit_msg>Raise exception if we don't know how to handle users operating system<commit_after> | import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
try:
platform_cmd_formatstring = { 'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
platform_cmd = platform_cmd_formatstring % path
os.popen(platform_cmd)
| import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
def open_folder(path):
path = os.path.normpath(path)
format_string = {'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
os.popen(format_string % path)
Raise exception if we don't know how to handle users operating systemimport os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
try:
platform_cmd_formatstring = { 'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
platform_cmd = platform_cmd_formatstring % path
os.popen(platform_cmd)
| <commit_before>import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
def open_folder(path):
path = os.path.normpath(path)
format_string = {'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
os.popen(format_string % path)
<commit_msg>Raise exception if we don't know how to handle users operating system<commit_after>import os, platform
# I intend to hide the Operating Specific details of opening a folder
# here in this module.
#
# On Mac OS X you do this with "open"
# e.g. "open '\Users\golliher\Documents\Tickler File'"
# On Windows you do this with "explorer"
# e.g. "explorer c:\Documents and Settings\Tickler File"
# On Linux xdg-open is a desktop-independant tool
def open_folder(path):
'''Runs operating specific command to open a folder. MacOS, Linux & Windows supported'''
path = os.path.normpath(path)
try:
platform_cmd_formatstring = { 'Darwin': "open '%s'", # note the quotation marks around path
'Linux': "xdg-open '%s'",
'Windows': "explorer %s"}[platform.system()]
except:
raise Exception("Your operating system was not recognized. Unable to determine how to open folders for you.")
platform_cmd = platform_cmd_formatstring % path
os.popen(platform_cmd)
|
e92b45ad68b665095cfce5daea7ff82550fcbfb1 | psqtraviscontainer/printer.py | psqtraviscontainer/printer.py | # /psqtraviscontainer/printer.py
#
# Utility functions for printing unicode text.
#
# See /LICENCE.md for Copyright information
"""Utility functions for printing unicode text."""
import sys
def unicode_safe(text):
"""Print text to standard output, handle unicode."""
# Don't trust non-file like replacements of sys.stdout, assume
# that they can only handle ascii.
if sys.stdout.__class__ is not file or not sys.stdout.isatty():
text = "".join([c for c in text if ord(c) < 128])
sys.stdout.write(text)
| # /psqtraviscontainer/printer.py
#
# Utility functions for printing unicode text.
#
# See /LICENCE.md for Copyright information
"""Utility functions for printing unicode text."""
import sys
def unicode_safe(text):
"""Print text to standard output, handle unicode."""
# If a replacement of sys.stdout doesn't have isatty, don't trust it.
if not getattr(sys.stdout, "isatty", None) or not sys.stdout.isatty():
text = "".join([c for c in text if ord(c) < 128])
sys.stdout.write(text)
| Check for the isatty property on sys.stdout. | Check for the isatty property on sys.stdout.
Previously we were checking to see if it was of type "file", but
the type changed between python 2 and 3. Really, all we want
to do is check if it is a tty and if we can't be sure of that,
don't enable utf8 output.
| Python | mit | polysquare/polysquare-travis-container | # /psqtraviscontainer/printer.py
#
# Utility functions for printing unicode text.
#
# See /LICENCE.md for Copyright information
"""Utility functions for printing unicode text."""
import sys
def unicode_safe(text):
"""Print text to standard output, handle unicode."""
# Don't trust non-file like replacements of sys.stdout, assume
# that they can only handle ascii.
if sys.stdout.__class__ is not file or not sys.stdout.isatty():
text = "".join([c for c in text if ord(c) < 128])
sys.stdout.write(text)
Check for the isatty property on sys.stdout.
Previously we were checking to see if it was of type "file", but
the type changed between python 2 and 3. Really, all we want
to do is check if it is a tty and if we can't be sure of that,
don't enable utf8 output. | # /psqtraviscontainer/printer.py
#
# Utility functions for printing unicode text.
#
# See /LICENCE.md for Copyright information
"""Utility functions for printing unicode text."""
import sys
def unicode_safe(text):
"""Print text to standard output, handle unicode."""
# If a replacement of sys.stdout doesn't have isatty, don't trust it.
if not getattr(sys.stdout, "isatty", None) or not sys.stdout.isatty():
text = "".join([c for c in text if ord(c) < 128])
sys.stdout.write(text)
| <commit_before># /psqtraviscontainer/printer.py
#
# Utility functions for printing unicode text.
#
# See /LICENCE.md for Copyright information
"""Utility functions for printing unicode text."""
import sys
def unicode_safe(text):
"""Print text to standard output, handle unicode."""
# Don't trust non-file like replacements of sys.stdout, assume
# that they can only handle ascii.
if sys.stdout.__class__ is not file or not sys.stdout.isatty():
text = "".join([c for c in text if ord(c) < 128])
sys.stdout.write(text)
<commit_msg>Check for the isatty property on sys.stdout.
Previously we were checking to see if it was of type "file", but
the type changed between python 2 and 3. Really, all we want
to do is check if it is a tty and if we can't be sure of that,
don't enable utf8 output.<commit_after> | # /psqtraviscontainer/printer.py
#
# Utility functions for printing unicode text.
#
# See /LICENCE.md for Copyright information
"""Utility functions for printing unicode text."""
import sys
def unicode_safe(text):
"""Print text to standard output, handle unicode."""
# If a replacement of sys.stdout doesn't have isatty, don't trust it.
if not getattr(sys.stdout, "isatty", None) or not sys.stdout.isatty():
text = "".join([c for c in text if ord(c) < 128])
sys.stdout.write(text)
| # /psqtraviscontainer/printer.py
#
# Utility functions for printing unicode text.
#
# See /LICENCE.md for Copyright information
"""Utility functions for printing unicode text."""
import sys
def unicode_safe(text):
"""Print text to standard output, handle unicode."""
# Don't trust non-file like replacements of sys.stdout, assume
# that they can only handle ascii.
if sys.stdout.__class__ is not file or not sys.stdout.isatty():
text = "".join([c for c in text if ord(c) < 128])
sys.stdout.write(text)
Check for the isatty property on sys.stdout.
Previously we were checking to see if it was of type "file", but
the type changed between python 2 and 3. Really, all we want
to do is check if it is a tty and if we can't be sure of that,
don't enable utf8 output.# /psqtraviscontainer/printer.py
#
# Utility functions for printing unicode text.
#
# See /LICENCE.md for Copyright information
"""Utility functions for printing unicode text."""
import sys
def unicode_safe(text):
"""Print text to standard output, handle unicode."""
# If a replacement of sys.stdout doesn't have isatty, don't trust it.
if not getattr(sys.stdout, "isatty", None) or not sys.stdout.isatty():
text = "".join([c for c in text if ord(c) < 128])
sys.stdout.write(text)
| <commit_before># /psqtraviscontainer/printer.py
#
# Utility functions for printing unicode text.
#
# See /LICENCE.md for Copyright information
"""Utility functions for printing unicode text."""
import sys
def unicode_safe(text):
"""Print text to standard output, handle unicode."""
# Don't trust non-file like replacements of sys.stdout, assume
# that they can only handle ascii.
if sys.stdout.__class__ is not file or not sys.stdout.isatty():
text = "".join([c for c in text if ord(c) < 128])
sys.stdout.write(text)
<commit_msg>Check for the isatty property on sys.stdout.
Previously we were checking to see if it was of type "file", but
the type changed between python 2 and 3. Really, all we want
to do is check if it is a tty and if we can't be sure of that,
don't enable utf8 output.<commit_after># /psqtraviscontainer/printer.py
#
# Utility functions for printing unicode text.
#
# See /LICENCE.md for Copyright information
"""Utility functions for printing unicode text."""
import sys
def unicode_safe(text):
"""Print text to standard output, handle unicode."""
# If a replacement of sys.stdout doesn't have isatty, don't trust it.
if not getattr(sys.stdout, "isatty", None) or not sys.stdout.isatty():
text = "".join([c for c in text if ord(c) < 128])
sys.stdout.write(text)
|
4e584b66db979878d413cfae4e6e9d085d40f811 | main/modelx.py | main/modelx.py | # -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
@property
def avatar_url(self):
return '//gravatar.com/avatar/%s?d=identicon&r=x' % (
hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower()
)
| # -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
@property
def avatar_url(self, size=80):
return '//gravatar.com/avatar/%s?d=identicon&r=x&s=%d' % (
hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower(),
size
)
| Support for size of gravatar image | Support for size of gravatar image
Added support for the size (s) argument in the Gravatar API | Python | mit | tonyin/optionstg,gae-init/gae-init-babel,d4rr3ll/gae-init-docker,lipis/the-smallest-creature,lipis/the-smallest-creature,lipis/the-smallest-creature,gmist/alice-box,gae-init/gae-init-upload,mdxs/gae-init,mdxs/gae-init,lovesoft/gae-init,wilfriedE/gae-init,JoeyCodinja/INFO3180LAB3,topless/gae-init,gmist/five-studio2,georgekis/salary,gae-init/gae-init,tiberiucorbu/av-website,wilfriedE/gae-init,lipis/electron-crash-reporter,lipis/hurry-app,lipis/life-line,JoeyCodinja/INFO3180LAB3,lipis/github-stats,gae-init/gae-init,mdxs/gae-init-babel,gmist/nashi-5studio,michals/hurry-app,NeftaliYagua/gae-init,lovesoft/gae-init,NeftaliYagua/gae-init,d4rr3ll/gae-init-docker,topless/gae-init-upload,michals/hurry-app,gae-init/gae-init-docs,lipis/electron-crash-reporter,gmist/ctm-5studio,gmist/my-gae-init,gmist/my-gae-init,gae-init/gae-init-babel,terradigital/gae-init,gmist/ctm-5studio,lovesoft/gae-init,JoeyCodinja/INFO3180LAB3,wilfriedE/gae-init,gae-init/gae-init-docs,antotodd/lab5,gae-init/phonebook,dhstack/gae-init,gmist/five-studio2,carylF/lab5,gmist/fix-5studio,vanessa-bell/hd-kiosk-v2,gmist/ctm-5studio,gmist/1businka2,jakedotio/gae-init,gae-init/gae-init-upload,topless/gae-init,tkstman/lab5,tiberiucorbu/av-website,Kingclove/lab5info3180,terradigital/gae-init,lipis/hurry-app,lipis/gae-init,topless/gae-init,mdxs/gae-init,gmist/1businka2,CLOUGH/info3180-lab5,gae-init/gae-init-debug,tiberiucorbu/av-website,gae-init/gae-init,gae-init/gae-init-debug,gae-init/gae-init-debug,gmist/ctm-5studio,topless/gae-init-upload,topless/gae-init,lipis/meet-notes,michals/hurry-app,gae-init/gae-init-babel,gmist/fix-5studio,lipis/github-stats,jakedotio/gae-init,gmist/1businka2,vanessa-bell/hd-kiosk-v2,vanessa-bell/hd-kiosk-v2,gae-init/gae-init,CLOUGH/info3180-lab5,lipis/guestbook,terradigital/gae-init,d4rr3ll/gae-init-docker,gmist/my-gae-init-auth,chineyting/lab5-Info3180,gmist/five-studio2,tkstman/lab5,gmist/fix-5studio,mdxs/gae-i
nit-docs,lipis/github-stats,lipis/electron-crash-reporter,tonyin/optionstg,georgekis/salary,gae-init/gae-init-docs,lipis/gae-init,mdxs/gae-init-babel,gmist/alice-box,JoeyCodinja/INFO3180LAB3,NeftaliYagua/gae-init,carylF/lab5,gae-init/gae-init-babel,lipis/life-line,gmist/my-gae-init,antotodd/lab5,mdxs/gae-init-babel,jaja14/lab5,gae-init/gae-init-docs,gmist/my-gae-init,gae-init/gae-init-debug,lipis/meet-notes,wodore/wodore-gae,gmist/five-studio2,dhstack/gae-init,chineyting/lab5-Info3180,lipis/gae-init,gmist/fix-5studio,jaja14/lab5,jakedotio/gae-init,gmist/nashi-5studio,gmist/nashi-5studio,mdxs/gae-init,lipis/github-stats,jakedotio/gae-init,gae-init/gae-init-upload,lipis/meet-notes,Kingclove/lab5info3180,gae-init/gae-init-upload,wodore/wodore-gae,wodore/wodore-gae,lipis/hurry-app,d4rr3ll/gae-init-docker,topless/gae-init-upload,lipis/gae-init,dhstack/gae-init,lipis/life-line,vanessa-bell/hd-kiosk-v2,wodore/wodore-gae,georgekis/salary | # -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
@property
def avatar_url(self):
return '//gravatar.com/avatar/%s?d=identicon&r=x' % (
hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower()
)
Support for size of gravatar image
Added support for the size (s) argument in the Gravatar API | # -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
@property
def avatar_url(self, size=80):
return '//gravatar.com/avatar/%s?d=identicon&r=x&s=%d' % (
hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower(),
size
)
| <commit_before># -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
@property
def avatar_url(self):
return '//gravatar.com/avatar/%s?d=identicon&r=x' % (
hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower()
)
<commit_msg>Support for size of gravatar image
Added support for the size (s) argument in the Gravatar API<commit_after> | # -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
@property
def avatar_url(self, size=80):
return '//gravatar.com/avatar/%s?d=identicon&r=x&s=%d' % (
hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower(),
size
)
| # -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
@property
def avatar_url(self):
return '//gravatar.com/avatar/%s?d=identicon&r=x' % (
hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower()
)
Support for size of gravatar image
Added support for the size (s) argument in the Gravatar API# -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
@property
def avatar_url(self, size=80):
return '//gravatar.com/avatar/%s?d=identicon&r=x&s=%d' % (
hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower(),
size
)
| <commit_before># -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
@property
def avatar_url(self):
return '//gravatar.com/avatar/%s?d=identicon&r=x' % (
hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower()
)
<commit_msg>Support for size of gravatar image
Added support for the size (s) argument in the Gravatar API<commit_after># -*- coding: utf-8 -*-
import hashlib
class BaseX(object):
@classmethod
def retrieve_one_by(cls, name, value):
cls_db_list = cls.query(getattr(cls, name) == value).fetch(1)
if cls_db_list:
return cls_db_list[0]
return None
class ConfigX(object):
@classmethod
def get_master_db(cls):
return cls.get_or_insert('master')
class UserX(object):
@property
def avatar_url(self, size=80):
return '//gravatar.com/avatar/%s?d=identicon&r=x&s=%d' % (
hashlib.md5((self.email or self.name).encode('utf-8')).hexdigest().lower(),
size
)
|
edae0479d4de3c8c1d61f80ea5366c075b807125 | mooch/banktransfer.py | mooch/banktransfer.py | from django import http
from django.conf.urls import url
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from mooch.base import BaseMoocher, require_POST_m
from mooch.signals import post_charge
class BankTransferMoocher(BaseMoocher):
identifier = 'banktransfer'
title = _('Pay by bank transfer')
def get_urls(self):
return [
url('^confirm/$', self.confirm_view, name='banktransfer_confirm'),
]
def payment_form(self, request, payment):
return render_to_string('mooch/banktransfer_payment_form.html', {
'payment': payment,
'moocher': self,
}, request=request)
@require_POST_m
def confirm_view(self, request):
instance = get_object_or_404(self.model, id=request.POST.get('id'))
instance.payment_service_provider = self.identifier
instance.transaction = repr(request.META.copy())
instance.save()
post_charge.send(
sender=self.__class__,
payment=instance,
request=request,
)
return http.HttpResponseRedirect('/') # TODO
| from django import http
from django.conf.urls import url
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from mooch.base import BaseMoocher, require_POST_m
from mooch.signals import post_charge
class BankTransferMoocher(BaseMoocher):
identifier = 'banktransfer'
title = _('Pay by bank transfer')
def get_urls(self):
return [
url('^confirm/$', self.confirm_view, name='banktransfer_confirm'),
]
def payment_form(self, request, payment):
return render_to_string('mooch/banktransfer_payment_form.html', {
'payment': payment,
'moocher': self,
}, request=request)
@require_POST_m
def confirm_view(self, request):
instance = get_object_or_404(self.model, id=request.POST.get('id'))
instance.payment_service_provider = self.identifier
instance.charged_at = timezone.now()
instance.transaction = repr(request.META.copy())
instance.save()
post_charge.send(
sender=self.__class__,
payment=instance,
request=request,
)
return http.HttpResponseRedirect('/') # TODO
| Set charged_at when confirming a bank transfer | Set charged_at when confirming a bank transfer
| Python | mit | matthiask/django-mooch,matthiask/django-mooch,matthiask/django-mooch | from django import http
from django.conf.urls import url
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from mooch.base import BaseMoocher, require_POST_m
from mooch.signals import post_charge
class BankTransferMoocher(BaseMoocher):
identifier = 'banktransfer'
title = _('Pay by bank transfer')
def get_urls(self):
return [
url('^confirm/$', self.confirm_view, name='banktransfer_confirm'),
]
def payment_form(self, request, payment):
return render_to_string('mooch/banktransfer_payment_form.html', {
'payment': payment,
'moocher': self,
}, request=request)
@require_POST_m
def confirm_view(self, request):
instance = get_object_or_404(self.model, id=request.POST.get('id'))
instance.payment_service_provider = self.identifier
instance.transaction = repr(request.META.copy())
instance.save()
post_charge.send(
sender=self.__class__,
payment=instance,
request=request,
)
return http.HttpResponseRedirect('/') # TODO
Set charged_at when confirming a bank transfer | from django import http
from django.conf.urls import url
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from mooch.base import BaseMoocher, require_POST_m
from mooch.signals import post_charge
class BankTransferMoocher(BaseMoocher):
identifier = 'banktransfer'
title = _('Pay by bank transfer')
def get_urls(self):
return [
url('^confirm/$', self.confirm_view, name='banktransfer_confirm'),
]
def payment_form(self, request, payment):
return render_to_string('mooch/banktransfer_payment_form.html', {
'payment': payment,
'moocher': self,
}, request=request)
@require_POST_m
def confirm_view(self, request):
instance = get_object_or_404(self.model, id=request.POST.get('id'))
instance.payment_service_provider = self.identifier
instance.charged_at = timezone.now()
instance.transaction = repr(request.META.copy())
instance.save()
post_charge.send(
sender=self.__class__,
payment=instance,
request=request,
)
return http.HttpResponseRedirect('/') # TODO
| <commit_before>from django import http
from django.conf.urls import url
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from mooch.base import BaseMoocher, require_POST_m
from mooch.signals import post_charge
class BankTransferMoocher(BaseMoocher):
identifier = 'banktransfer'
title = _('Pay by bank transfer')
def get_urls(self):
return [
url('^confirm/$', self.confirm_view, name='banktransfer_confirm'),
]
def payment_form(self, request, payment):
return render_to_string('mooch/banktransfer_payment_form.html', {
'payment': payment,
'moocher': self,
}, request=request)
@require_POST_m
def confirm_view(self, request):
instance = get_object_or_404(self.model, id=request.POST.get('id'))
instance.payment_service_provider = self.identifier
instance.transaction = repr(request.META.copy())
instance.save()
post_charge.send(
sender=self.__class__,
payment=instance,
request=request,
)
return http.HttpResponseRedirect('/') # TODO
<commit_msg>Set charged_at when confirming a bank transfer<commit_after> | from django import http
from django.conf.urls import url
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from mooch.base import BaseMoocher, require_POST_m
from mooch.signals import post_charge
class BankTransferMoocher(BaseMoocher):
identifier = 'banktransfer'
title = _('Pay by bank transfer')
def get_urls(self):
return [
url('^confirm/$', self.confirm_view, name='banktransfer_confirm'),
]
def payment_form(self, request, payment):
return render_to_string('mooch/banktransfer_payment_form.html', {
'payment': payment,
'moocher': self,
}, request=request)
@require_POST_m
def confirm_view(self, request):
instance = get_object_or_404(self.model, id=request.POST.get('id'))
instance.payment_service_provider = self.identifier
instance.charged_at = timezone.now()
instance.transaction = repr(request.META.copy())
instance.save()
post_charge.send(
sender=self.__class__,
payment=instance,
request=request,
)
return http.HttpResponseRedirect('/') # TODO
| from django import http
from django.conf.urls import url
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from mooch.base import BaseMoocher, require_POST_m
from mooch.signals import post_charge
class BankTransferMoocher(BaseMoocher):
identifier = 'banktransfer'
title = _('Pay by bank transfer')
def get_urls(self):
return [
url('^confirm/$', self.confirm_view, name='banktransfer_confirm'),
]
def payment_form(self, request, payment):
return render_to_string('mooch/banktransfer_payment_form.html', {
'payment': payment,
'moocher': self,
}, request=request)
@require_POST_m
def confirm_view(self, request):
instance = get_object_or_404(self.model, id=request.POST.get('id'))
instance.payment_service_provider = self.identifier
instance.transaction = repr(request.META.copy())
instance.save()
post_charge.send(
sender=self.__class__,
payment=instance,
request=request,
)
return http.HttpResponseRedirect('/') # TODO
Set charged_at when confirming a bank transferfrom django import http
from django.conf.urls import url
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from mooch.base import BaseMoocher, require_POST_m
from mooch.signals import post_charge
class BankTransferMoocher(BaseMoocher):
identifier = 'banktransfer'
title = _('Pay by bank transfer')
def get_urls(self):
return [
url('^confirm/$', self.confirm_view, name='banktransfer_confirm'),
]
def payment_form(self, request, payment):
return render_to_string('mooch/banktransfer_payment_form.html', {
'payment': payment,
'moocher': self,
}, request=request)
@require_POST_m
def confirm_view(self, request):
instance = get_object_or_404(self.model, id=request.POST.get('id'))
instance.payment_service_provider = self.identifier
instance.charged_at = timezone.now()
instance.transaction = repr(request.META.copy())
instance.save()
post_charge.send(
sender=self.__class__,
payment=instance,
request=request,
)
return http.HttpResponseRedirect('/') # TODO
| <commit_before>from django import http
from django.conf.urls import url
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from mooch.base import BaseMoocher, require_POST_m
from mooch.signals import post_charge
class BankTransferMoocher(BaseMoocher):
identifier = 'banktransfer'
title = _('Pay by bank transfer')
def get_urls(self):
return [
url('^confirm/$', self.confirm_view, name='banktransfer_confirm'),
]
def payment_form(self, request, payment):
return render_to_string('mooch/banktransfer_payment_form.html', {
'payment': payment,
'moocher': self,
}, request=request)
@require_POST_m
def confirm_view(self, request):
instance = get_object_or_404(self.model, id=request.POST.get('id'))
instance.payment_service_provider = self.identifier
instance.transaction = repr(request.META.copy())
instance.save()
post_charge.send(
sender=self.__class__,
payment=instance,
request=request,
)
return http.HttpResponseRedirect('/') # TODO
<commit_msg>Set charged_at when confirming a bank transfer<commit_after>from django import http
from django.conf.urls import url
from django.shortcuts import get_object_or_404
from django.template.loader import render_to_string
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from mooch.base import BaseMoocher, require_POST_m
from mooch.signals import post_charge
class BankTransferMoocher(BaseMoocher):
identifier = 'banktransfer'
title = _('Pay by bank transfer')
def get_urls(self):
return [
url('^confirm/$', self.confirm_view, name='banktransfer_confirm'),
]
def payment_form(self, request, payment):
return render_to_string('mooch/banktransfer_payment_form.html', {
'payment': payment,
'moocher': self,
}, request=request)
@require_POST_m
def confirm_view(self, request):
instance = get_object_or_404(self.model, id=request.POST.get('id'))
instance.payment_service_provider = self.identifier
instance.charged_at = timezone.now()
instance.transaction = repr(request.META.copy())
instance.save()
post_charge.send(
sender=self.__class__,
payment=instance,
request=request,
)
return http.HttpResponseRedirect('/') # TODO
|
d63480d00206a08a3e41c6af7512181198aced05 | object_join.py | object_join.py | __author__ = 'stuart'
class JoinedObject(object):
def __init__(self, left, right):
self.left = left
self.right = right
def __getattr__(self, attr):
if attr == 'left':
return self.left
elif attr == 'right':
return self.right
else:
return self.get_from_sources(attr)
def __repr__(self):
return '<{} object at {}>'.format(
self.left.__class__.__name__ + self.right.__class__.__name__,
id(self))
def get_from_sources(self, attr):
if hasattr(self.left, attr):
return getattr(self.left, attr)
elif hasattr(self.right, attr):
return getattr(self.right, attr)
else:
raise AttributeError(
"Neither of joined object's parents ({}, {}), have attribute "
"'{}'".format(self.left.__class__.__name__,
self.left.__class__.__name__, attr))
| __author__ = 'stuart'
class JoinedObject(object):
def __init__(self, left, right):
self.left = left
self.right = right
def __getattr__(self, attr):
if attr == 'left':
return self.left
elif attr == 'right':
return self.right
else:
return self.get_from_sources(attr)
def __repr__(self):
return '<{} object at {}>'.format(
self.left.__class__.__name__ + self.right.__class__.__name__,
id(self))
def __dir__(self):
attrs = list(set(dir(self.left) + dir(self.right) + ['left', 'right']))
return sorted(attrs)
def get_from_sources(self, attr):
if hasattr(self.left, attr):
return getattr(self.left, attr)
elif hasattr(self.right, attr):
return getattr(self.right, attr)
else:
raise AttributeError(
"Neither of joined object's parents ({}, {}), have attribute "
"'{}'".format(self.left.__class__.__name__,
self.right.__class__.__name__, attr))
| Add proper `__dir__` reporting, fix bug in AttributeError | Add proper `__dir__` reporting, fix bug in AttributeError
| Python | mit | StuartAxelOwen/datastreams | __author__ = 'stuart'
class JoinedObject(object):
def __init__(self, left, right):
self.left = left
self.right = right
def __getattr__(self, attr):
if attr == 'left':
return self.left
elif attr == 'right':
return self.right
else:
return self.get_from_sources(attr)
def __repr__(self):
return '<{} object at {}>'.format(
self.left.__class__.__name__ + self.right.__class__.__name__,
id(self))
def get_from_sources(self, attr):
if hasattr(self.left, attr):
return getattr(self.left, attr)
elif hasattr(self.right, attr):
return getattr(self.right, attr)
else:
raise AttributeError(
"Neither of joined object's parents ({}, {}), have attribute "
"'{}'".format(self.left.__class__.__name__,
self.left.__class__.__name__, attr))
Add proper `__dir__` reporting, fix bug in AttributeError | __author__ = 'stuart'
class JoinedObject(object):
def __init__(self, left, right):
self.left = left
self.right = right
def __getattr__(self, attr):
if attr == 'left':
return self.left
elif attr == 'right':
return self.right
else:
return self.get_from_sources(attr)
def __repr__(self):
return '<{} object at {}>'.format(
self.left.__class__.__name__ + self.right.__class__.__name__,
id(self))
def __dir__(self):
attrs = list(set(dir(self.left) + dir(self.right) + ['left', 'right']))
return sorted(attrs)
def get_from_sources(self, attr):
if hasattr(self.left, attr):
return getattr(self.left, attr)
elif hasattr(self.right, attr):
return getattr(self.right, attr)
else:
raise AttributeError(
"Neither of joined object's parents ({}, {}), have attribute "
"'{}'".format(self.left.__class__.__name__,
self.right.__class__.__name__, attr))
| <commit_before>__author__ = 'stuart'
class JoinedObject(object):
def __init__(self, left, right):
self.left = left
self.right = right
def __getattr__(self, attr):
if attr == 'left':
return self.left
elif attr == 'right':
return self.right
else:
return self.get_from_sources(attr)
def __repr__(self):
return '<{} object at {}>'.format(
self.left.__class__.__name__ + self.right.__class__.__name__,
id(self))
def get_from_sources(self, attr):
if hasattr(self.left, attr):
return getattr(self.left, attr)
elif hasattr(self.right, attr):
return getattr(self.right, attr)
else:
raise AttributeError(
"Neither of joined object's parents ({}, {}), have attribute "
"'{}'".format(self.left.__class__.__name__,
self.left.__class__.__name__, attr))
<commit_msg>Add proper `__dir__` reporting, fix bug in AttributeError<commit_after> | __author__ = 'stuart'
class JoinedObject(object):
def __init__(self, left, right):
self.left = left
self.right = right
def __getattr__(self, attr):
if attr == 'left':
return self.left
elif attr == 'right':
return self.right
else:
return self.get_from_sources(attr)
def __repr__(self):
return '<{} object at {}>'.format(
self.left.__class__.__name__ + self.right.__class__.__name__,
id(self))
def __dir__(self):
attrs = list(set(dir(self.left) + dir(self.right) + ['left', 'right']))
return sorted(attrs)
def get_from_sources(self, attr):
if hasattr(self.left, attr):
return getattr(self.left, attr)
elif hasattr(self.right, attr):
return getattr(self.right, attr)
else:
raise AttributeError(
"Neither of joined object's parents ({}, {}), have attribute "
"'{}'".format(self.left.__class__.__name__,
self.right.__class__.__name__, attr))
| __author__ = 'stuart'
class JoinedObject(object):
def __init__(self, left, right):
self.left = left
self.right = right
def __getattr__(self, attr):
if attr == 'left':
return self.left
elif attr == 'right':
return self.right
else:
return self.get_from_sources(attr)
def __repr__(self):
return '<{} object at {}>'.format(
self.left.__class__.__name__ + self.right.__class__.__name__,
id(self))
def get_from_sources(self, attr):
if hasattr(self.left, attr):
return getattr(self.left, attr)
elif hasattr(self.right, attr):
return getattr(self.right, attr)
else:
raise AttributeError(
"Neither of joined object's parents ({}, {}), have attribute "
"'{}'".format(self.left.__class__.__name__,
self.left.__class__.__name__, attr))
Add proper `__dir__` reporting, fix bug in AttributeError__author__ = 'stuart'
class JoinedObject(object):
def __init__(self, left, right):
self.left = left
self.right = right
def __getattr__(self, attr):
if attr == 'left':
return self.left
elif attr == 'right':
return self.right
else:
return self.get_from_sources(attr)
def __repr__(self):
return '<{} object at {}>'.format(
self.left.__class__.__name__ + self.right.__class__.__name__,
id(self))
def __dir__(self):
attrs = list(set(dir(self.left) + dir(self.right) + ['left', 'right']))
return sorted(attrs)
def get_from_sources(self, attr):
if hasattr(self.left, attr):
return getattr(self.left, attr)
elif hasattr(self.right, attr):
return getattr(self.right, attr)
else:
raise AttributeError(
"Neither of joined object's parents ({}, {}), have attribute "
"'{}'".format(self.left.__class__.__name__,
self.right.__class__.__name__, attr))
| <commit_before>__author__ = 'stuart'
class JoinedObject(object):
def __init__(self, left, right):
self.left = left
self.right = right
def __getattr__(self, attr):
if attr == 'left':
return self.left
elif attr == 'right':
return self.right
else:
return self.get_from_sources(attr)
def __repr__(self):
return '<{} object at {}>'.format(
self.left.__class__.__name__ + self.right.__class__.__name__,
id(self))
def get_from_sources(self, attr):
if hasattr(self.left, attr):
return getattr(self.left, attr)
elif hasattr(self.right, attr):
return getattr(self.right, attr)
else:
raise AttributeError(
"Neither of joined object's parents ({}, {}), have attribute "
"'{}'".format(self.left.__class__.__name__,
self.left.__class__.__name__, attr))
<commit_msg>Add proper `__dir__` reporting, fix bug in AttributeError<commit_after>__author__ = 'stuart'
class JoinedObject(object):
def __init__(self, left, right):
self.left = left
self.right = right
def __getattr__(self, attr):
if attr == 'left':
return self.left
elif attr == 'right':
return self.right
else:
return self.get_from_sources(attr)
def __repr__(self):
return '<{} object at {}>'.format(
self.left.__class__.__name__ + self.right.__class__.__name__,
id(self))
def __dir__(self):
attrs = list(set(dir(self.left) + dir(self.right) + ['left', 'right']))
return sorted(attrs)
def get_from_sources(self, attr):
if hasattr(self.left, attr):
return getattr(self.left, attr)
elif hasattr(self.right, attr):
return getattr(self.right, attr)
else:
raise AttributeError(
"Neither of joined object's parents ({}, {}), have attribute "
"'{}'".format(self.left.__class__.__name__,
self.right.__class__.__name__, attr))
|
403250e91905079c7480bb8ea54cf2d2a301022f | moto/s3/urls.py | moto/s3/urls.py | from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>[a-zA-Z0-9\-_.]+)': S3ResponseInstance.key_response,
}
| from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>.+)': S3ResponseInstance.key_response,
}
| Fix S3 URL Regex to allow slashes in key names. | Fix S3 URL Regex to allow slashes in key names.
| Python | apache-2.0 | heddle317/moto,gjtempleton/moto,rocky4570/moto,jszwedko/moto,dbfr3qs/moto,okomestudio/moto,whummer/moto,2rs2ts/moto,Brett55/moto,ZuluPro/moto,whummer/moto,kefo/moto,ZuluPro/moto,botify-labs/moto,rocky4570/moto,rocky4570/moto,whummer/moto,spulec/moto,Brett55/moto,william-richard/moto,2rs2ts/moto,rocky4570/moto,jrydberg/moto,gjtempleton/moto,Affirm/moto,botify-labs/moto,heddle317/moto,kennethd/moto,botify-labs/moto,kefo/moto,kefo/moto,Affirm/moto,im-auld/moto,ZuluPro/moto,botify-labs/moto,dbfr3qs/moto,okomestudio/moto,william-richard/moto,rocky4570/moto,Brett55/moto,pior/moto,okomestudio/moto,Affirm/moto,Brett55/moto,DataDog/moto,whummer/moto,IlyaSukhanov/moto,2rs2ts/moto,spulec/moto,spulec/moto,Affirm/moto,Brett55/moto,Brett55/moto,whummer/moto,whummer/moto,2rs2ts/moto,heddle317/moto,gjtempleton/moto,okomestudio/moto,heddle317/moto,zonk1024/moto,spulec/moto,behanceops/moto,dbfr3qs/moto,dbfr3qs/moto,dbfr3qs/moto,ZuluPro/moto,mrucci/moto,kefo/moto,alexdebrie/moto,rouge8/moto,dbfr3qs/moto,Affirm/moto,2mf/moto,gjtempleton/moto,ZuluPro/moto,spulec/moto,ZuluPro/moto,heddle317/moto,okomestudio/moto,Affirm/moto,kefo/moto,william-richard/moto,william-richard/moto,ImmobilienScout24/moto,william-richard/moto,rocky4570/moto,braintreeps/moto,okomestudio/moto,andresriancho/moto,araines/moto,2rs2ts/moto,ludia/moto,spulec/moto,silveregg/moto,gjtempleton/moto,riccardomc/moto,jotes/moto,tootedom/moto,botify-labs/moto,EarthmanT/moto,botify-labs/moto,william-richard/moto | from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>[a-zA-Z0-9\-_.]+)': S3ResponseInstance.key_response,
}
Fix S3 URL Regex to allow slashes in key names. | from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>.+)': S3ResponseInstance.key_response,
}
| <commit_before>from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>[a-zA-Z0-9\-_.]+)': S3ResponseInstance.key_response,
}
<commit_msg>Fix S3 URL Regex to allow slashes in key names.<commit_after> | from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>.+)': S3ResponseInstance.key_response,
}
| from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>[a-zA-Z0-9\-_.]+)': S3ResponseInstance.key_response,
}
Fix S3 URL Regex to allow slashes in key names.from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>.+)': S3ResponseInstance.key_response,
}
| <commit_before>from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>[a-zA-Z0-9\-_.]+)': S3ResponseInstance.key_response,
}
<commit_msg>Fix S3 URL Regex to allow slashes in key names.<commit_after>from .responses import S3ResponseInstance
url_bases = [
"https?://(?P<bucket_name>[a-zA-Z0-9\-_.]*)\.?s3.amazonaws.com"
]
url_paths = {
'{0}/$': S3ResponseInstance.bucket_response,
'{0}/(?P<key_name>.+)': S3ResponseInstance.key_response,
}
|
802bed896c147fc6bb6dc72f62a80236bc3cd263 | soccermetrics/rest/resources/personnel.py | soccermetrics/rest/resources/personnel.py | from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on all personnel involved in a football match – players,
managers, and match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource | from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on the following personnel involved in a football match:
* Players,
* Managers,
* Match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource | Remove stray non-ASCII character in docstring | Remove stray non-ASCII character in docstring
| Python | mit | soccermetrics/soccermetrics-client-py | from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on all personnel involved in a football match – players,
managers, and match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resourceRemove stray non-ASCII character in docstring | from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on the following personnel involved in a football match:
* Players,
* Managers,
* Match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource | <commit_before>from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on all personnel involved in a football match – players,
managers, and match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource<commit_msg>Remove stray non-ASCII character in docstring<commit_after> | from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on the following personnel involved in a football match:
* Players,
* Managers,
* Match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource | from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on all personnel involved in a football match – players,
managers, and match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resourceRemove stray non-ASCII character in docstringfrom soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on the following personnel involved in a football match:
* Players,
* Managers,
* Match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource | <commit_before>from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on all personnel involved in a football match – players,
managers, and match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource<commit_msg>Remove stray non-ASCII character in docstring<commit_after>from soccermetrics.rest.resources import Resource
class Personnel(Resource):
"""
Represents a Personnel REST resource (/personnel/<resource> endpoint).
The Personnel resources let you access biographic and demographic
data on the following personnel involved in a football match:
* Players,
* Managers,
* Match referees.
Derived from :class:`Resource`.
"""
def __init__(self, resource, base_uri, auth):
"""
Constructor of Personnel class.
:param resource: Name of resource.
:type resource: string
:param base_uri: Base URI of API.
:type base_uri: string
:param auth: Authentication credential.
:type auth: tuple
"""
super(Personnel, self).__init__(base_uri,auth)
self.endpoint += "/personnel/%s" % resource |
8068afed19a6596a5fbed364c19571c44970fc67 | src/poliastro/tests/test_jit.py | src/poliastro/tests/test_jit.py | from poliastro import jit
def _fake_numba_import():
# Black magic, beware
# https://stackoverflow.com/a/2484402/554319
import sys
class FakeImportFailure:
def __init__(self, modules):
self.modules = modules
def find_module(self, fullname, *args, **kwargs):
if fullname in self.modules:
raise ImportError('Debug import failure for %s' % fullname)
fail_loader = FakeImportFailure(['numba'])
import poliastro.jit
from poliastro import jit
del poliastro.jit
del jit
del sys.modules['poliastro.jit']
del sys.modules['numba']
sys.meta_path.insert(0, fail_loader)
def test_ijit_returns_same_function_without_args():
def expected_foo():
return True
foo = jit.ijit(expected_foo)
assert foo is expected_foo
def test_ijit_returns_same_function_with_args():
def expected_foo():
return True
foo = jit.ijit(1)(expected_foo)
assert foo is expected_foo
def test_no_numba_emits_warning(recwarn):
_fake_numba_import()
from poliastro import jit
assert len(recwarn) == 1
w = recwarn.pop(UserWarning)
assert issubclass(w.category, UserWarning)
assert "Could not import numba package" in str(w.message)
| from contextlib import contextmanager
from poliastro import jit
@contextmanager
def _fake_numba_import():
# Black magic, beware
# https://stackoverflow.com/a/2484402/554319
import sys
class FakeImportFailure:
def __init__(self, modules):
self.modules = modules
def find_module(self, fullname, *args, **kwargs):
if fullname in self.modules:
raise ImportError('Debug import failure for %s' % fullname)
fail_loader = FakeImportFailure(['numba'])
import poliastro.jit
from poliastro import jit
del poliastro.jit
del jit
del sys.modules['poliastro.jit']
del sys.modules['numba']
sys.meta_path.insert(0, fail_loader)
yield
sys.meta_path.remove(fail_loader)
import numba
from poliastro import jit
def test_ijit_returns_same_function_without_args():
def expected_foo():
return True
foo = jit.ijit(expected_foo)
assert foo is expected_foo
def test_ijit_returns_same_function_with_args():
def expected_foo():
return True
foo = jit.ijit(1)(expected_foo)
assert foo is expected_foo
def test_no_numba_emits_warning(recwarn):
with _fake_numba_import():
from poliastro import jit
assert len(recwarn) == 1
w = recwarn.pop(UserWarning)
assert issubclass(w.category, UserWarning)
assert "Could not import numba package" in str(w.message)
| Make numba fake import robust | Make numba fake import robust
| Python | mit | anhiga/poliastro,newlawrence/poliastro,Juanlu001/poliastro,newlawrence/poliastro,Juanlu001/poliastro,anhiga/poliastro,poliastro/poliastro,Juanlu001/poliastro,anhiga/poliastro,newlawrence/poliastro | from poliastro import jit
def _fake_numba_import():
# Black magic, beware
# https://stackoverflow.com/a/2484402/554319
import sys
class FakeImportFailure:
def __init__(self, modules):
self.modules = modules
def find_module(self, fullname, *args, **kwargs):
if fullname in self.modules:
raise ImportError('Debug import failure for %s' % fullname)
fail_loader = FakeImportFailure(['numba'])
import poliastro.jit
from poliastro import jit
del poliastro.jit
del jit
del sys.modules['poliastro.jit']
del sys.modules['numba']
sys.meta_path.insert(0, fail_loader)
def test_ijit_returns_same_function_without_args():
def expected_foo():
return True
foo = jit.ijit(expected_foo)
assert foo is expected_foo
def test_ijit_returns_same_function_with_args():
def expected_foo():
return True
foo = jit.ijit(1)(expected_foo)
assert foo is expected_foo
def test_no_numba_emits_warning(recwarn):
_fake_numba_import()
from poliastro import jit
assert len(recwarn) == 1
w = recwarn.pop(UserWarning)
assert issubclass(w.category, UserWarning)
assert "Could not import numba package" in str(w.message)
Make numba fake import robust | from contextlib import contextmanager
from poliastro import jit
@contextmanager
def _fake_numba_import():
# Black magic, beware
# https://stackoverflow.com/a/2484402/554319
import sys
class FakeImportFailure:
def __init__(self, modules):
self.modules = modules
def find_module(self, fullname, *args, **kwargs):
if fullname in self.modules:
raise ImportError('Debug import failure for %s' % fullname)
fail_loader = FakeImportFailure(['numba'])
import poliastro.jit
from poliastro import jit
del poliastro.jit
del jit
del sys.modules['poliastro.jit']
del sys.modules['numba']
sys.meta_path.insert(0, fail_loader)
yield
sys.meta_path.remove(fail_loader)
import numba
from poliastro import jit
def test_ijit_returns_same_function_without_args():
def expected_foo():
return True
foo = jit.ijit(expected_foo)
assert foo is expected_foo
def test_ijit_returns_same_function_with_args():
def expected_foo():
return True
foo = jit.ijit(1)(expected_foo)
assert foo is expected_foo
def test_no_numba_emits_warning(recwarn):
with _fake_numba_import():
from poliastro import jit
assert len(recwarn) == 1
w = recwarn.pop(UserWarning)
assert issubclass(w.category, UserWarning)
assert "Could not import numba package" in str(w.message)
| <commit_before>from poliastro import jit
def _fake_numba_import():
# Black magic, beware
# https://stackoverflow.com/a/2484402/554319
import sys
class FakeImportFailure:
def __init__(self, modules):
self.modules = modules
def find_module(self, fullname, *args, **kwargs):
if fullname in self.modules:
raise ImportError('Debug import failure for %s' % fullname)
fail_loader = FakeImportFailure(['numba'])
import poliastro.jit
from poliastro import jit
del poliastro.jit
del jit
del sys.modules['poliastro.jit']
del sys.modules['numba']
sys.meta_path.insert(0, fail_loader)
def test_ijit_returns_same_function_without_args():
def expected_foo():
return True
foo = jit.ijit(expected_foo)
assert foo is expected_foo
def test_ijit_returns_same_function_with_args():
def expected_foo():
return True
foo = jit.ijit(1)(expected_foo)
assert foo is expected_foo
def test_no_numba_emits_warning(recwarn):
_fake_numba_import()
from poliastro import jit
assert len(recwarn) == 1
w = recwarn.pop(UserWarning)
assert issubclass(w.category, UserWarning)
assert "Could not import numba package" in str(w.message)
<commit_msg>Make numba fake import robust<commit_after> | from contextlib import contextmanager
from poliastro import jit
@contextmanager
def _fake_numba_import():
# Black magic, beware
# https://stackoverflow.com/a/2484402/554319
import sys
class FakeImportFailure:
def __init__(self, modules):
self.modules = modules
def find_module(self, fullname, *args, **kwargs):
if fullname in self.modules:
raise ImportError('Debug import failure for %s' % fullname)
fail_loader = FakeImportFailure(['numba'])
import poliastro.jit
from poliastro import jit
del poliastro.jit
del jit
del sys.modules['poliastro.jit']
del sys.modules['numba']
sys.meta_path.insert(0, fail_loader)
yield
sys.meta_path.remove(fail_loader)
import numba
from poliastro import jit
def test_ijit_returns_same_function_without_args():
def expected_foo():
return True
foo = jit.ijit(expected_foo)
assert foo is expected_foo
def test_ijit_returns_same_function_with_args():
def expected_foo():
return True
foo = jit.ijit(1)(expected_foo)
assert foo is expected_foo
def test_no_numba_emits_warning(recwarn):
with _fake_numba_import():
from poliastro import jit
assert len(recwarn) == 1
w = recwarn.pop(UserWarning)
assert issubclass(w.category, UserWarning)
assert "Could not import numba package" in str(w.message)
| from poliastro import jit
def _fake_numba_import():
# Black magic, beware
# https://stackoverflow.com/a/2484402/554319
import sys
class FakeImportFailure:
def __init__(self, modules):
self.modules = modules
def find_module(self, fullname, *args, **kwargs):
if fullname in self.modules:
raise ImportError('Debug import failure for %s' % fullname)
fail_loader = FakeImportFailure(['numba'])
import poliastro.jit
from poliastro import jit
del poliastro.jit
del jit
del sys.modules['poliastro.jit']
del sys.modules['numba']
sys.meta_path.insert(0, fail_loader)
def test_ijit_returns_same_function_without_args():
def expected_foo():
return True
foo = jit.ijit(expected_foo)
assert foo is expected_foo
def test_ijit_returns_same_function_with_args():
def expected_foo():
return True
foo = jit.ijit(1)(expected_foo)
assert foo is expected_foo
def test_no_numba_emits_warning(recwarn):
_fake_numba_import()
from poliastro import jit
assert len(recwarn) == 1
w = recwarn.pop(UserWarning)
assert issubclass(w.category, UserWarning)
assert "Could not import numba package" in str(w.message)
Make numba fake import robustfrom contextlib import contextmanager
from poliastro import jit
@contextmanager
def _fake_numba_import():
# Black magic, beware
# https://stackoverflow.com/a/2484402/554319
import sys
class FakeImportFailure:
def __init__(self, modules):
self.modules = modules
def find_module(self, fullname, *args, **kwargs):
if fullname in self.modules:
raise ImportError('Debug import failure for %s' % fullname)
fail_loader = FakeImportFailure(['numba'])
import poliastro.jit
from poliastro import jit
del poliastro.jit
del jit
del sys.modules['poliastro.jit']
del sys.modules['numba']
sys.meta_path.insert(0, fail_loader)
yield
sys.meta_path.remove(fail_loader)
import numba
from poliastro import jit
def test_ijit_returns_same_function_without_args():
def expected_foo():
return True
foo = jit.ijit(expected_foo)
assert foo is expected_foo
def test_ijit_returns_same_function_with_args():
def expected_foo():
return True
foo = jit.ijit(1)(expected_foo)
assert foo is expected_foo
def test_no_numba_emits_warning(recwarn):
with _fake_numba_import():
from poliastro import jit
assert len(recwarn) == 1
w = recwarn.pop(UserWarning)
assert issubclass(w.category, UserWarning)
assert "Could not import numba package" in str(w.message)
| <commit_before>from poliastro import jit
def _fake_numba_import():
# Black magic, beware
# https://stackoverflow.com/a/2484402/554319
import sys
class FakeImportFailure:
def __init__(self, modules):
self.modules = modules
def find_module(self, fullname, *args, **kwargs):
if fullname in self.modules:
raise ImportError('Debug import failure for %s' % fullname)
fail_loader = FakeImportFailure(['numba'])
import poliastro.jit
from poliastro import jit
del poliastro.jit
del jit
del sys.modules['poliastro.jit']
del sys.modules['numba']
sys.meta_path.insert(0, fail_loader)
def test_ijit_returns_same_function_without_args():
def expected_foo():
return True
foo = jit.ijit(expected_foo)
assert foo is expected_foo
def test_ijit_returns_same_function_with_args():
def expected_foo():
return True
foo = jit.ijit(1)(expected_foo)
assert foo is expected_foo
def test_no_numba_emits_warning(recwarn):
_fake_numba_import()
from poliastro import jit
assert len(recwarn) == 1
w = recwarn.pop(UserWarning)
assert issubclass(w.category, UserWarning)
assert "Could not import numba package" in str(w.message)
<commit_msg>Make numba fake import robust<commit_after>from contextlib import contextmanager
from poliastro import jit
@contextmanager
def _fake_numba_import():
# Black magic, beware
# https://stackoverflow.com/a/2484402/554319
import sys
class FakeImportFailure:
def __init__(self, modules):
self.modules = modules
def find_module(self, fullname, *args, **kwargs):
if fullname in self.modules:
raise ImportError('Debug import failure for %s' % fullname)
fail_loader = FakeImportFailure(['numba'])
import poliastro.jit
from poliastro import jit
del poliastro.jit
del jit
del sys.modules['poliastro.jit']
del sys.modules['numba']
sys.meta_path.insert(0, fail_loader)
yield
sys.meta_path.remove(fail_loader)
import numba
from poliastro import jit
def test_ijit_returns_same_function_without_args():
def expected_foo():
return True
foo = jit.ijit(expected_foo)
assert foo is expected_foo
def test_ijit_returns_same_function_with_args():
def expected_foo():
return True
foo = jit.ijit(1)(expected_foo)
assert foo is expected_foo
def test_no_numba_emits_warning(recwarn):
with _fake_numba_import():
from poliastro import jit
assert len(recwarn) == 1
w = recwarn.pop(UserWarning)
assert issubclass(w.category, UserWarning)
assert "Could not import numba package" in str(w.message)
|
4a62214f0c9e8789b8453a48c0a880c4ac6236cb | saleor/product/migrations/0123_auto_20200904_1251.py | saleor/product/migrations/0123_auto_20200904_1251.py | # Generated by Django 3.1 on 2020-09-04 12:51
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
| # Generated by Django 3.1 on 2020-09-04 12:51
from django.db import migrations
from django.db.models import Count
def remove_variant_image_duplicates(apps, schema_editor):
ProductImage = apps.get_model("product", "ProductImage")
VariantImage = apps.get_model("product", "VariantImage")
duplicated_images = (
ProductImage.objects.values("pk", "variant_images__variant")
.annotate(variant_count=Count("variant_images__variant"))
.filter(variant_count__gte=2)
)
variant_image_ids_to_remove = []
for image_data in duplicated_images:
ids = VariantImage.objects.filter(
variant=image_data["variant_images__variant"], image__pk=image_data["pk"],
)[1:].values_list("pk", flat=True)
variant_image_ids_to_remove += ids
VariantImage.objects.filter(pk__in=variant_image_ids_to_remove).delete()
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.RunPython(
remove_variant_image_duplicates, migrations.RunPython.noop
),
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
| Drop duplicated VariantImages before migration to unique together | Drop duplicated VariantImages before migration to unique together
| Python | bsd-3-clause | mociepka/saleor,mociepka/saleor,mociepka/saleor | # Generated by Django 3.1 on 2020-09-04 12:51
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
Drop duplicated VariantImages before migration to unique together | # Generated by Django 3.1 on 2020-09-04 12:51
from django.db import migrations
from django.db.models import Count
def remove_variant_image_duplicates(apps, schema_editor):
ProductImage = apps.get_model("product", "ProductImage")
VariantImage = apps.get_model("product", "VariantImage")
duplicated_images = (
ProductImage.objects.values("pk", "variant_images__variant")
.annotate(variant_count=Count("variant_images__variant"))
.filter(variant_count__gte=2)
)
variant_image_ids_to_remove = []
for image_data in duplicated_images:
ids = VariantImage.objects.filter(
variant=image_data["variant_images__variant"], image__pk=image_data["pk"],
)[1:].values_list("pk", flat=True)
variant_image_ids_to_remove += ids
VariantImage.objects.filter(pk__in=variant_image_ids_to_remove).delete()
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.RunPython(
remove_variant_image_duplicates, migrations.RunPython.noop
),
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
| <commit_before># Generated by Django 3.1 on 2020-09-04 12:51
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
<commit_msg>Drop duplicated VariantImages before migration to unique together<commit_after> | # Generated by Django 3.1 on 2020-09-04 12:51
from django.db import migrations
from django.db.models import Count
def remove_variant_image_duplicates(apps, schema_editor):
ProductImage = apps.get_model("product", "ProductImage")
VariantImage = apps.get_model("product", "VariantImage")
duplicated_images = (
ProductImage.objects.values("pk", "variant_images__variant")
.annotate(variant_count=Count("variant_images__variant"))
.filter(variant_count__gte=2)
)
variant_image_ids_to_remove = []
for image_data in duplicated_images:
ids = VariantImage.objects.filter(
variant=image_data["variant_images__variant"], image__pk=image_data["pk"],
)[1:].values_list("pk", flat=True)
variant_image_ids_to_remove += ids
VariantImage.objects.filter(pk__in=variant_image_ids_to_remove).delete()
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.RunPython(
remove_variant_image_duplicates, migrations.RunPython.noop
),
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
| # Generated by Django 3.1 on 2020-09-04 12:51
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
Drop duplicated VariantImages before migration to unique together# Generated by Django 3.1 on 2020-09-04 12:51
from django.db import migrations
from django.db.models import Count
def remove_variant_image_duplicates(apps, schema_editor):
ProductImage = apps.get_model("product", "ProductImage")
VariantImage = apps.get_model("product", "VariantImage")
duplicated_images = (
ProductImage.objects.values("pk", "variant_images__variant")
.annotate(variant_count=Count("variant_images__variant"))
.filter(variant_count__gte=2)
)
variant_image_ids_to_remove = []
for image_data in duplicated_images:
ids = VariantImage.objects.filter(
variant=image_data["variant_images__variant"], image__pk=image_data["pk"],
)[1:].values_list("pk", flat=True)
variant_image_ids_to_remove += ids
VariantImage.objects.filter(pk__in=variant_image_ids_to_remove).delete()
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.RunPython(
remove_variant_image_duplicates, migrations.RunPython.noop
),
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
| <commit_before># Generated by Django 3.1 on 2020-09-04 12:51
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
<commit_msg>Drop duplicated VariantImages before migration to unique together<commit_after># Generated by Django 3.1 on 2020-09-04 12:51
from django.db import migrations
from django.db.models import Count
def remove_variant_image_duplicates(apps, schema_editor):
ProductImage = apps.get_model("product", "ProductImage")
VariantImage = apps.get_model("product", "VariantImage")
duplicated_images = (
ProductImage.objects.values("pk", "variant_images__variant")
.annotate(variant_count=Count("variant_images__variant"))
.filter(variant_count__gte=2)
)
variant_image_ids_to_remove = []
for image_data in duplicated_images:
ids = VariantImage.objects.filter(
variant=image_data["variant_images__variant"], image__pk=image_data["pk"],
)[1:].values_list("pk", flat=True)
variant_image_ids_to_remove += ids
VariantImage.objects.filter(pk__in=variant_image_ids_to_remove).delete()
class Migration(migrations.Migration):
dependencies = [
("product", "0122_auto_20200828_1135"),
]
operations = [
migrations.RunPython(
remove_variant_image_duplicates, migrations.RunPython.noop
),
migrations.AlterUniqueTogether(
name="variantimage", unique_together={("variant", "image")},
),
]
|
874acdfcca178759e39c154f5f2c844710db7ab0 | json_field/__init__.py | json_field/__init__.py | try:
from json_field.fields import JSONField
except ImportError:
pass # fails when imported by setup.py, no worries
__version__ = '0.4.2'
| from django.core.exceptions import ImproperlyConfigured
try:
from json_field.fields import JSONField
except (ImportError, ImproperlyConfigured):
pass # fails when imported by setup.py, no worries
__version__ = '0.4.2'
| Fix install with Django 1.5. | Fix install with Django 1.5.
| Python | bsd-3-clause | doordash/django-json-field,hoh/django-json-field,EyePulp/django-json-field,derek-schaefer/django-json-field,matllubos/django-json-field | try:
from json_field.fields import JSONField
except ImportError:
pass # fails when imported by setup.py, no worries
__version__ = '0.4.2'
Fix install with Django 1.5. | from django.core.exceptions import ImproperlyConfigured
try:
from json_field.fields import JSONField
except (ImportError, ImproperlyConfigured):
pass # fails when imported by setup.py, no worries
__version__ = '0.4.2'
| <commit_before>try:
from json_field.fields import JSONField
except ImportError:
pass # fails when imported by setup.py, no worries
__version__ = '0.4.2'
<commit_msg>Fix install with Django 1.5.<commit_after> | from django.core.exceptions import ImproperlyConfigured
try:
from json_field.fields import JSONField
except (ImportError, ImproperlyConfigured):
pass # fails when imported by setup.py, no worries
__version__ = '0.4.2'
| try:
from json_field.fields import JSONField
except ImportError:
pass # fails when imported by setup.py, no worries
__version__ = '0.4.2'
Fix install with Django 1.5.from django.core.exceptions import ImproperlyConfigured
try:
from json_field.fields import JSONField
except (ImportError, ImproperlyConfigured):
pass # fails when imported by setup.py, no worries
__version__ = '0.4.2'
| <commit_before>try:
from json_field.fields import JSONField
except ImportError:
pass # fails when imported by setup.py, no worries
__version__ = '0.4.2'
<commit_msg>Fix install with Django 1.5.<commit_after>from django.core.exceptions import ImproperlyConfigured
try:
from json_field.fields import JSONField
except (ImportError, ImproperlyConfigured):
pass # fails when imported by setup.py, no worries
__version__ = '0.4.2'
|
770c898e205e1c927e3371402a3ebba32471d4a7 | actions/run.py | actions/run.py | import requests
import urllib
import urlparse
from st2actions.runners.pythonrunner import Action
class ActiveCampaignAction(Action):
def run(self, **kwargs):
if kwargs['api_key'] is None:
kwargs['api_key'] = self.config['api_key']
return self._get_request(kwargs)
def _get_request(self, params):
url = urlparse.urljoin(self.config['url'], 'admin/api.php')
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded'
params = self._format_params(params)
data = urllib.urlencode(params)
self.logger.info(data)
response = requests.get(url=url,
headers=headers, params=data)
results = response.json()
if results['result_code'] is not 1:
failure_reason = ('Failed to perform action %s: %s \
(status code: %s)' % (end_point, response.text,
response.status_code))
self.logger.exception(failure_reason)
raise Exception(failure_reason)
return results
def _format_params(self, params):
output = {}
for k, v in params.iteritems():
if isinstance(v, dict):
print type(v)
for pk, pv in v.iteritems():
param_name = "%s[%s]" % (k, pk)
output[param_name] = pv
else:
output[k] = v
return output
| import requests
import urllib
import urlparse
from st2actions.runners.pythonrunner import Action
class ActiveCampaignAction(Action):
def run(self, **kwargs):
if kwargs['api_key'] is None:
kwargs['api_key'] = self.config['api_key']
return self._get_request(kwargs)
def _get_request(self, params):
url = urlparse.urljoin(self.config['url'], 'admin/api.php')
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded'
params = self._format_params(params)
data = urllib.urlencode(params)
response = requests.get(url=url,
headers=headers, params=data)
results = response.json()
if results['result_code'] is not 1:
failure_reason = ('Failed to perform action: %s \
(status code: %s)' % (response.text,
response.status_code))
self.logger.exception(failure_reason)
raise Exception(failure_reason)
return results
def _format_params(self, params):
output = {}
for k, v in params.iteritems():
if isinstance(v, dict):
print type(v)
for pk, pv in v.iteritems():
param_name = "%s[%s]" % (k, pk)
output[param_name] = pv
else:
output[k] = v
return output
| Remove extraneous logging and bad param | Remove extraneous logging and bad param
| Python | apache-2.0 | DoriftoShoes/activecampaign | import requests
import urllib
import urlparse
from st2actions.runners.pythonrunner import Action
class ActiveCampaignAction(Action):
def run(self, **kwargs):
if kwargs['api_key'] is None:
kwargs['api_key'] = self.config['api_key']
return self._get_request(kwargs)
def _get_request(self, params):
url = urlparse.urljoin(self.config['url'], 'admin/api.php')
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded'
params = self._format_params(params)
data = urllib.urlencode(params)
self.logger.info(data)
response = requests.get(url=url,
headers=headers, params=data)
results = response.json()
if results['result_code'] is not 1:
failure_reason = ('Failed to perform action %s: %s \
(status code: %s)' % (end_point, response.text,
response.status_code))
self.logger.exception(failure_reason)
raise Exception(failure_reason)
return results
def _format_params(self, params):
output = {}
for k, v in params.iteritems():
if isinstance(v, dict):
print type(v)
for pk, pv in v.iteritems():
param_name = "%s[%s]" % (k, pk)
output[param_name] = pv
else:
output[k] = v
return output
Remove extraneous logging and bad param | import requests
import urllib
import urlparse
from st2actions.runners.pythonrunner import Action
class ActiveCampaignAction(Action):
def run(self, **kwargs):
if kwargs['api_key'] is None:
kwargs['api_key'] = self.config['api_key']
return self._get_request(kwargs)
def _get_request(self, params):
url = urlparse.urljoin(self.config['url'], 'admin/api.php')
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded'
params = self._format_params(params)
data = urllib.urlencode(params)
response = requests.get(url=url,
headers=headers, params=data)
results = response.json()
if results['result_code'] is not 1:
failure_reason = ('Failed to perform action: %s \
(status code: %s)' % (response.text,
response.status_code))
self.logger.exception(failure_reason)
raise Exception(failure_reason)
return results
def _format_params(self, params):
output = {}
for k, v in params.iteritems():
if isinstance(v, dict):
print type(v)
for pk, pv in v.iteritems():
param_name = "%s[%s]" % (k, pk)
output[param_name] = pv
else:
output[k] = v
return output
| <commit_before>import requests
import urllib
import urlparse
from st2actions.runners.pythonrunner import Action
class ActiveCampaignAction(Action):
def run(self, **kwargs):
if kwargs['api_key'] is None:
kwargs['api_key'] = self.config['api_key']
return self._get_request(kwargs)
def _get_request(self, params):
url = urlparse.urljoin(self.config['url'], 'admin/api.php')
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded'
params = self._format_params(params)
data = urllib.urlencode(params)
self.logger.info(data)
response = requests.get(url=url,
headers=headers, params=data)
results = response.json()
if results['result_code'] is not 1:
failure_reason = ('Failed to perform action %s: %s \
(status code: %s)' % (end_point, response.text,
response.status_code))
self.logger.exception(failure_reason)
raise Exception(failure_reason)
return results
def _format_params(self, params):
output = {}
for k, v in params.iteritems():
if isinstance(v, dict):
print type(v)
for pk, pv in v.iteritems():
param_name = "%s[%s]" % (k, pk)
output[param_name] = pv
else:
output[k] = v
return output
<commit_msg>Remove extraneous logging and bad param<commit_after> | import requests
import urllib
import urlparse
from st2actions.runners.pythonrunner import Action
class ActiveCampaignAction(Action):
def run(self, **kwargs):
if kwargs['api_key'] is None:
kwargs['api_key'] = self.config['api_key']
return self._get_request(kwargs)
def _get_request(self, params):
url = urlparse.urljoin(self.config['url'], 'admin/api.php')
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded'
params = self._format_params(params)
data = urllib.urlencode(params)
response = requests.get(url=url,
headers=headers, params=data)
results = response.json()
if results['result_code'] is not 1:
failure_reason = ('Failed to perform action: %s \
(status code: %s)' % (response.text,
response.status_code))
self.logger.exception(failure_reason)
raise Exception(failure_reason)
return results
def _format_params(self, params):
output = {}
for k, v in params.iteritems():
if isinstance(v, dict):
print type(v)
for pk, pv in v.iteritems():
param_name = "%s[%s]" % (k, pk)
output[param_name] = pv
else:
output[k] = v
return output
| import requests
import urllib
import urlparse
from st2actions.runners.pythonrunner import Action
class ActiveCampaignAction(Action):
def run(self, **kwargs):
if kwargs['api_key'] is None:
kwargs['api_key'] = self.config['api_key']
return self._get_request(kwargs)
def _get_request(self, params):
url = urlparse.urljoin(self.config['url'], 'admin/api.php')
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded'
params = self._format_params(params)
data = urllib.urlencode(params)
self.logger.info(data)
response = requests.get(url=url,
headers=headers, params=data)
results = response.json()
if results['result_code'] is not 1:
failure_reason = ('Failed to perform action %s: %s \
(status code: %s)' % (end_point, response.text,
response.status_code))
self.logger.exception(failure_reason)
raise Exception(failure_reason)
return results
def _format_params(self, params):
output = {}
for k, v in params.iteritems():
if isinstance(v, dict):
print type(v)
for pk, pv in v.iteritems():
param_name = "%s[%s]" % (k, pk)
output[param_name] = pv
else:
output[k] = v
return output
Remove extraneous logging and bad paramimport requests
import urllib
import urlparse
from st2actions.runners.pythonrunner import Action
class ActiveCampaignAction(Action):
def run(self, **kwargs):
if kwargs['api_key'] is None:
kwargs['api_key'] = self.config['api_key']
return self._get_request(kwargs)
def _get_request(self, params):
url = urlparse.urljoin(self.config['url'], 'admin/api.php')
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded'
params = self._format_params(params)
data = urllib.urlencode(params)
response = requests.get(url=url,
headers=headers, params=data)
results = response.json()
if results['result_code'] is not 1:
failure_reason = ('Failed to perform action: %s \
(status code: %s)' % (response.text,
response.status_code))
self.logger.exception(failure_reason)
raise Exception(failure_reason)
return results
def _format_params(self, params):
output = {}
for k, v in params.iteritems():
if isinstance(v, dict):
print type(v)
for pk, pv in v.iteritems():
param_name = "%s[%s]" % (k, pk)
output[param_name] = pv
else:
output[k] = v
return output
| <commit_before>import requests
import urllib
import urlparse
from st2actions.runners.pythonrunner import Action
class ActiveCampaignAction(Action):
def run(self, **kwargs):
if kwargs['api_key'] is None:
kwargs['api_key'] = self.config['api_key']
return self._get_request(kwargs)
def _get_request(self, params):
url = urlparse.urljoin(self.config['url'], 'admin/api.php')
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded'
params = self._format_params(params)
data = urllib.urlencode(params)
self.logger.info(data)
response = requests.get(url=url,
headers=headers, params=data)
results = response.json()
if results['result_code'] is not 1:
failure_reason = ('Failed to perform action %s: %s \
(status code: %s)' % (end_point, response.text,
response.status_code))
self.logger.exception(failure_reason)
raise Exception(failure_reason)
return results
def _format_params(self, params):
output = {}
for k, v in params.iteritems():
if isinstance(v, dict):
print type(v)
for pk, pv in v.iteritems():
param_name = "%s[%s]" % (k, pk)
output[param_name] = pv
else:
output[k] = v
return output
<commit_msg>Remove extraneous logging and bad param<commit_after>import requests
import urllib
import urlparse
from st2actions.runners.pythonrunner import Action
class ActiveCampaignAction(Action):
def run(self, **kwargs):
if kwargs['api_key'] is None:
kwargs['api_key'] = self.config['api_key']
return self._get_request(kwargs)
def _get_request(self, params):
url = urlparse.urljoin(self.config['url'], 'admin/api.php')
headers = {}
headers['Content-Type'] = 'application/x-www-form-urlencoded'
params = self._format_params(params)
data = urllib.urlencode(params)
response = requests.get(url=url,
headers=headers, params=data)
results = response.json()
if results['result_code'] is not 1:
failure_reason = ('Failed to perform action: %s \
(status code: %s)' % (response.text,
response.status_code))
self.logger.exception(failure_reason)
raise Exception(failure_reason)
return results
def _format_params(self, params):
output = {}
for k, v in params.iteritems():
if isinstance(v, dict):
print type(v)
for pk, pv in v.iteritems():
param_name = "%s[%s]" % (k, pk)
output[param_name] = pv
else:
output[k] = v
return output
|
cf248e9c5ea0091ea8262c865d65bde9be267d89 | backend/uclapi/uclapi/urls.py | backend/uclapi/uclapi/urls.py | """uclapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^dashboard/', include('dashboard.urls')),
url(r'^v0/roombookings/', include('roombookings.urls')),
]
| """uclapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^dashboard/', include('dashboard.urls')),
url(r'^roombookings/', include('roombookings.urls')),
]
| Remove versioning from URL scheme | Remove versioning from URL scheme
| Python | mit | uclapi/uclapi,uclapi/uclapi,uclapi/uclapi,uclapi/uclapi | """uclapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^dashboard/', include('dashboard.urls')),
url(r'^v0/roombookings/', include('roombookings.urls')),
]
Remove versioning from URL scheme | """uclapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^dashboard/', include('dashboard.urls')),
url(r'^roombookings/', include('roombookings.urls')),
]
| <commit_before>"""uclapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^dashboard/', include('dashboard.urls')),
url(r'^v0/roombookings/', include('roombookings.urls')),
]
<commit_msg>Remove versioning from URL scheme<commit_after> | """uclapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^dashboard/', include('dashboard.urls')),
url(r'^roombookings/', include('roombookings.urls')),
]
| """uclapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^dashboard/', include('dashboard.urls')),
url(r'^v0/roombookings/', include('roombookings.urls')),
]
Remove versioning from URL scheme"""uclapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^dashboard/', include('dashboard.urls')),
url(r'^roombookings/', include('roombookings.urls')),
]
| <commit_before>"""uclapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^dashboard/', include('dashboard.urls')),
url(r'^v0/roombookings/', include('roombookings.urls')),
]
<commit_msg>Remove versioning from URL scheme<commit_after>"""uclapi URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^dashboard/', include('dashboard.urls')),
url(r'^roombookings/', include('roombookings.urls')),
]
|
13173bffbffeb5e7b30f5b14351a7d33f0a3b110 | examples/translations/portuguese_test_1.py | examples/translations/portuguese_test_1.py | # Portuguese Language Test
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.js_digitar("#searchform input", "João Pessoa")
self.clique("#searchform button")
self.verificar_texto("João Pessoa", "#firstHeading")
self.verificar_elemento('img[alt*="João Pessoa"]')
self.js_digitar("#searchform input", "Florianópolis")
self.clique("#searchform button")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('td:contains("Avenida Beira-Mar")')
self.voltar()
self.verificar_verdade("João" in self.obter_url_atual())
self.js_digitar("#searchform input", "Teatro Amazonas")
self.clique("#searchform button")
self.verificar_texto("Teatro Amazonas", "#firstHeading")
self.verificar_texto_do_link("Festival Amazonas de Ópera")
| # Portuguese Language Test
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.digitar("#searchform input", "João Pessoa")
self.clique("#searchform button")
self.verificar_texto("João Pessoa", "#firstHeading")
self.verificar_elemento('img[alt*="João Pessoa"]')
self.digitar("#searchform input", "Florianópolis")
self.clique("#searchform button")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('td:contains("Avenida Beira-Mar")')
self.voltar()
self.verificar_verdade("João" in self.obter_url_atual())
self.digitar("#searchform input", "Teatro Amazonas")
self.clique("#searchform button")
self.verificar_texto("Teatro Amazonas", "#firstHeading")
self.verificar_texto_do_link("Festival Amazonas de Ópera")
| Update the Portuguese example test | Update the Portuguese example test
| Python | mit | seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase | # Portuguese Language Test
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.js_digitar("#searchform input", "João Pessoa")
self.clique("#searchform button")
self.verificar_texto("João Pessoa", "#firstHeading")
self.verificar_elemento('img[alt*="João Pessoa"]')
self.js_digitar("#searchform input", "Florianópolis")
self.clique("#searchform button")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('td:contains("Avenida Beira-Mar")')
self.voltar()
self.verificar_verdade("João" in self.obter_url_atual())
self.js_digitar("#searchform input", "Teatro Amazonas")
self.clique("#searchform button")
self.verificar_texto("Teatro Amazonas", "#firstHeading")
self.verificar_texto_do_link("Festival Amazonas de Ópera")
Update the Portuguese example test | # Portuguese Language Test
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.digitar("#searchform input", "João Pessoa")
self.clique("#searchform button")
self.verificar_texto("João Pessoa", "#firstHeading")
self.verificar_elemento('img[alt*="João Pessoa"]')
self.digitar("#searchform input", "Florianópolis")
self.clique("#searchform button")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('td:contains("Avenida Beira-Mar")')
self.voltar()
self.verificar_verdade("João" in self.obter_url_atual())
self.digitar("#searchform input", "Teatro Amazonas")
self.clique("#searchform button")
self.verificar_texto("Teatro Amazonas", "#firstHeading")
self.verificar_texto_do_link("Festival Amazonas de Ópera")
| <commit_before># Portuguese Language Test
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.js_digitar("#searchform input", "João Pessoa")
self.clique("#searchform button")
self.verificar_texto("João Pessoa", "#firstHeading")
self.verificar_elemento('img[alt*="João Pessoa"]')
self.js_digitar("#searchform input", "Florianópolis")
self.clique("#searchform button")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('td:contains("Avenida Beira-Mar")')
self.voltar()
self.verificar_verdade("João" in self.obter_url_atual())
self.js_digitar("#searchform input", "Teatro Amazonas")
self.clique("#searchform button")
self.verificar_texto("Teatro Amazonas", "#firstHeading")
self.verificar_texto_do_link("Festival Amazonas de Ópera")
<commit_msg>Update the Portuguese example test<commit_after> | # Portuguese Language Test
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.digitar("#searchform input", "João Pessoa")
self.clique("#searchform button")
self.verificar_texto("João Pessoa", "#firstHeading")
self.verificar_elemento('img[alt*="João Pessoa"]')
self.digitar("#searchform input", "Florianópolis")
self.clique("#searchform button")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('td:contains("Avenida Beira-Mar")')
self.voltar()
self.verificar_verdade("João" in self.obter_url_atual())
self.digitar("#searchform input", "Teatro Amazonas")
self.clique("#searchform button")
self.verificar_texto("Teatro Amazonas", "#firstHeading")
self.verificar_texto_do_link("Festival Amazonas de Ópera")
| # Portuguese Language Test
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.js_digitar("#searchform input", "João Pessoa")
self.clique("#searchform button")
self.verificar_texto("João Pessoa", "#firstHeading")
self.verificar_elemento('img[alt*="João Pessoa"]')
self.js_digitar("#searchform input", "Florianópolis")
self.clique("#searchform button")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('td:contains("Avenida Beira-Mar")')
self.voltar()
self.verificar_verdade("João" in self.obter_url_atual())
self.js_digitar("#searchform input", "Teatro Amazonas")
self.clique("#searchform button")
self.verificar_texto("Teatro Amazonas", "#firstHeading")
self.verificar_texto_do_link("Festival Amazonas de Ópera")
Update the Portuguese example test# Portuguese Language Test
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.digitar("#searchform input", "João Pessoa")
self.clique("#searchform button")
self.verificar_texto("João Pessoa", "#firstHeading")
self.verificar_elemento('img[alt*="João Pessoa"]')
self.digitar("#searchform input", "Florianópolis")
self.clique("#searchform button")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('td:contains("Avenida Beira-Mar")')
self.voltar()
self.verificar_verdade("João" in self.obter_url_atual())
self.digitar("#searchform input", "Teatro Amazonas")
self.clique("#searchform button")
self.verificar_texto("Teatro Amazonas", "#firstHeading")
self.verificar_texto_do_link("Festival Amazonas de Ópera")
| <commit_before># Portuguese Language Test
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.js_digitar("#searchform input", "João Pessoa")
self.clique("#searchform button")
self.verificar_texto("João Pessoa", "#firstHeading")
self.verificar_elemento('img[alt*="João Pessoa"]')
self.js_digitar("#searchform input", "Florianópolis")
self.clique("#searchform button")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('td:contains("Avenida Beira-Mar")')
self.voltar()
self.verificar_verdade("João" in self.obter_url_atual())
self.js_digitar("#searchform input", "Teatro Amazonas")
self.clique("#searchform button")
self.verificar_texto("Teatro Amazonas", "#firstHeading")
self.verificar_texto_do_link("Festival Amazonas de Ópera")
<commit_msg>Update the Portuguese example test<commit_after># Portuguese Language Test
from seleniumbase.translate.portuguese import CasoDeTeste
class MinhaClasseDeTeste(CasoDeTeste):
def test_exemplo_1(self):
self.abrir("https://pt.wikipedia.org/wiki/")
self.verificar_texto("Wikipédia")
self.verificar_elemento('[title="Língua portuguesa"]')
self.digitar("#searchform input", "João Pessoa")
self.clique("#searchform button")
self.verificar_texto("João Pessoa", "#firstHeading")
self.verificar_elemento('img[alt*="João Pessoa"]')
self.digitar("#searchform input", "Florianópolis")
self.clique("#searchform button")
self.verificar_texto("Florianópolis", "h1#firstHeading")
self.verificar_elemento('td:contains("Avenida Beira-Mar")')
self.voltar()
self.verificar_verdade("João" in self.obter_url_atual())
self.digitar("#searchform input", "Teatro Amazonas")
self.clique("#searchform button")
self.verificar_texto("Teatro Amazonas", "#firstHeading")
self.verificar_texto_do_link("Festival Amazonas de Ópera")
|
615d036c6735d94609d6398dff676cd8e3a8f58a | app/__init__.py | app/__init__.py | import os
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
from datetime import timedelta
from .main import main as main_blueprint
from .main.helpers.auth import requires_auth
bootstrap = Bootstrap()
def create_app(config_name):
application = Flask(__name__)
application.config.from_object(config[config_name])
config[config_name].init_app(application)
bootstrap.init_app(application)
application.register_blueprint(main_blueprint)
main_blueprint.config = application.config.copy()
if application.config['AUTHENTICATION']:
application.permanent_session_lifetime = timedelta(minutes=60)
application.before_request(requires_auth)
return application
| from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
from datetime import timedelta
from .main import main as main_blueprint
from .main.helpers.auth import requires_auth
bootstrap = Bootstrap()
def create_app(config_name):
application = Flask(__name__,
static_folder='static/',
static_url_path=config[config_name].STATIC_URL_PATH)
application.config.from_object(config[config_name])
config[config_name].init_app(application)
bootstrap.init_app(application)
application.register_blueprint(main_blueprint)
main_blueprint.config = application.config.copy()
if application.config['AUTHENTICATION']:
application.permanent_session_lifetime = timedelta(minutes=60)
application.before_request(requires_auth)
return application
| Set static path for the Flask app | Set static path for the Flask app
| Python | mit | alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend | import os
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
from datetime import timedelta
from .main import main as main_blueprint
from .main.helpers.auth import requires_auth
bootstrap = Bootstrap()
def create_app(config_name):
application = Flask(__name__)
application.config.from_object(config[config_name])
config[config_name].init_app(application)
bootstrap.init_app(application)
application.register_blueprint(main_blueprint)
main_blueprint.config = application.config.copy()
if application.config['AUTHENTICATION']:
application.permanent_session_lifetime = timedelta(minutes=60)
application.before_request(requires_auth)
return application
Set static path for the Flask app | from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
from datetime import timedelta
from .main import main as main_blueprint
from .main.helpers.auth import requires_auth
bootstrap = Bootstrap()
def create_app(config_name):
application = Flask(__name__,
static_folder='static/',
static_url_path=config[config_name].STATIC_URL_PATH)
application.config.from_object(config[config_name])
config[config_name].init_app(application)
bootstrap.init_app(application)
application.register_blueprint(main_blueprint)
main_blueprint.config = application.config.copy()
if application.config['AUTHENTICATION']:
application.permanent_session_lifetime = timedelta(minutes=60)
application.before_request(requires_auth)
return application
| <commit_before>import os
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
from datetime import timedelta
from .main import main as main_blueprint
from .main.helpers.auth import requires_auth
bootstrap = Bootstrap()
def create_app(config_name):
application = Flask(__name__)
application.config.from_object(config[config_name])
config[config_name].init_app(application)
bootstrap.init_app(application)
application.register_blueprint(main_blueprint)
main_blueprint.config = application.config.copy()
if application.config['AUTHENTICATION']:
application.permanent_session_lifetime = timedelta(minutes=60)
application.before_request(requires_auth)
return application
<commit_msg>Set static path for the Flask app<commit_after> | from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
from datetime import timedelta
from .main import main as main_blueprint
from .main.helpers.auth import requires_auth
bootstrap = Bootstrap()
def create_app(config_name):
application = Flask(__name__,
static_folder='static/',
static_url_path=config[config_name].STATIC_URL_PATH)
application.config.from_object(config[config_name])
config[config_name].init_app(application)
bootstrap.init_app(application)
application.register_blueprint(main_blueprint)
main_blueprint.config = application.config.copy()
if application.config['AUTHENTICATION']:
application.permanent_session_lifetime = timedelta(minutes=60)
application.before_request(requires_auth)
return application
| import os
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
from datetime import timedelta
from .main import main as main_blueprint
from .main.helpers.auth import requires_auth
bootstrap = Bootstrap()
def create_app(config_name):
application = Flask(__name__)
application.config.from_object(config[config_name])
config[config_name].init_app(application)
bootstrap.init_app(application)
application.register_blueprint(main_blueprint)
main_blueprint.config = application.config.copy()
if application.config['AUTHENTICATION']:
application.permanent_session_lifetime = timedelta(minutes=60)
application.before_request(requires_auth)
return application
Set static path for the Flask appfrom flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
from datetime import timedelta
from .main import main as main_blueprint
from .main.helpers.auth import requires_auth
bootstrap = Bootstrap()
def create_app(config_name):
application = Flask(__name__,
static_folder='static/',
static_url_path=config[config_name].STATIC_URL_PATH)
application.config.from_object(config[config_name])
config[config_name].init_app(application)
bootstrap.init_app(application)
application.register_blueprint(main_blueprint)
main_blueprint.config = application.config.copy()
if application.config['AUTHENTICATION']:
application.permanent_session_lifetime = timedelta(minutes=60)
application.before_request(requires_auth)
return application
| <commit_before>import os
from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
from datetime import timedelta
from .main import main as main_blueprint
from .main.helpers.auth import requires_auth
bootstrap = Bootstrap()
def create_app(config_name):
application = Flask(__name__)
application.config.from_object(config[config_name])
config[config_name].init_app(application)
bootstrap.init_app(application)
application.register_blueprint(main_blueprint)
main_blueprint.config = application.config.copy()
if application.config['AUTHENTICATION']:
application.permanent_session_lifetime = timedelta(minutes=60)
application.before_request(requires_auth)
return application
<commit_msg>Set static path for the Flask app<commit_after>from flask import Flask
from flask.ext.bootstrap import Bootstrap
from config import config
from datetime import timedelta
from .main import main as main_blueprint
from .main.helpers.auth import requires_auth
bootstrap = Bootstrap()
def create_app(config_name):
application = Flask(__name__,
static_folder='static/',
static_url_path=config[config_name].STATIC_URL_PATH)
application.config.from_object(config[config_name])
config[config_name].init_app(application)
bootstrap.init_app(application)
application.register_blueprint(main_blueprint)
main_blueprint.config = application.config.copy()
if application.config['AUTHENTICATION']:
application.permanent_session_lifetime = timedelta(minutes=60)
application.before_request(requires_auth)
return application
|
c86c90f5be35359b5bd87b956bdd0d7d0021cfea | busstops/management/commands/import_scotch_operator_contacts.py | busstops/management/commands/import_scotch_operator_contacts.py | """
Usage:
./manage.py import_scotch_operator_contacts < NOC_DB.csv
"""
import requests
from ..import_from_csv import ImportFromCSVCommand
from ...models import Operator
class Command(ImportFromCSVCommand):
scotch_operators = {
operator['code']: operator
for operator in requests.get('http://www.travelinescotland.com/lts/operatorList').json()['body']
}
@classmethod
def handle_row(cls, row):
if row['SC']:
scotch = cls.scotch_operators.get(row['SC'])
if scotch and len(row['NOCCODE']) == 4:
operator = Operator.objects.get(pk=row['NOCCODE'])
operator.name = scotch['name']
operator.address = scotch['address']
operator.url = scotch['url']
operator.email = scotch['email']
operator.phone = scotch['phone']
operator.save()
@staticmethod
def process_rows(rows):
return sorted(rows, reverse=True,
key=lambda r: (r['Duplicate'] != 'OK', r['Date Ceased']))
| """
Usage:
./manage.py import_scotch_operator_contacts < NOC_DB.csv
"""
import requests
from ..import_from_csv import ImportFromCSVCommand
from ...models import Operator
class Command(ImportFromCSVCommand):
scotch_operators = {
operator['code']: operator
for operator in requests.get('http://www.travelinescotland.com/lts/operatorList').json()['body']
}
@classmethod
def handle_row(cls, row):
if row['SC']:
scotch = cls.scotch_operators.get(row['SC'])
if scotch and len(row['NOCCODE']) == 4:
operator = Operator.objects.filter(pk=row['NOCCODE']).first()
if not operator:
return
operator.name = scotch['name']
operator.address = scotch['address']
operator.url = scotch['url']
operator.email = scotch['email']
operator.phone = scotch['phone']
operator.save()
@staticmethod
def process_rows(rows):
return sorted(rows, reverse=True,
key=lambda r: (r['Duplicate'] != 'OK', r['Date Ceased']))
| Fix scotch operator contact import | Fix scotch operator contact import
| Python | mpl-2.0 | stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk | """
Usage:
./manage.py import_scotch_operator_contacts < NOC_DB.csv
"""
import requests
from ..import_from_csv import ImportFromCSVCommand
from ...models import Operator
class Command(ImportFromCSVCommand):
scotch_operators = {
operator['code']: operator
for operator in requests.get('http://www.travelinescotland.com/lts/operatorList').json()['body']
}
@classmethod
def handle_row(cls, row):
if row['SC']:
scotch = cls.scotch_operators.get(row['SC'])
if scotch and len(row['NOCCODE']) == 4:
operator = Operator.objects.get(pk=row['NOCCODE'])
operator.name = scotch['name']
operator.address = scotch['address']
operator.url = scotch['url']
operator.email = scotch['email']
operator.phone = scotch['phone']
operator.save()
@staticmethod
def process_rows(rows):
return sorted(rows, reverse=True,
key=lambda r: (r['Duplicate'] != 'OK', r['Date Ceased']))
Fix scotch operator contact import | """
Usage:
./manage.py import_scotch_operator_contacts < NOC_DB.csv
"""
import requests
from ..import_from_csv import ImportFromCSVCommand
from ...models import Operator
class Command(ImportFromCSVCommand):
scotch_operators = {
operator['code']: operator
for operator in requests.get('http://www.travelinescotland.com/lts/operatorList').json()['body']
}
@classmethod
def handle_row(cls, row):
if row['SC']:
scotch = cls.scotch_operators.get(row['SC'])
if scotch and len(row['NOCCODE']) == 4:
operator = Operator.objects.filter(pk=row['NOCCODE']).first()
if not operator:
return
operator.name = scotch['name']
operator.address = scotch['address']
operator.url = scotch['url']
operator.email = scotch['email']
operator.phone = scotch['phone']
operator.save()
@staticmethod
def process_rows(rows):
return sorted(rows, reverse=True,
key=lambda r: (r['Duplicate'] != 'OK', r['Date Ceased']))
| <commit_before>"""
Usage:
./manage.py import_scotch_operator_contacts < NOC_DB.csv
"""
import requests
from ..import_from_csv import ImportFromCSVCommand
from ...models import Operator
class Command(ImportFromCSVCommand):
scotch_operators = {
operator['code']: operator
for operator in requests.get('http://www.travelinescotland.com/lts/operatorList').json()['body']
}
@classmethod
def handle_row(cls, row):
if row['SC']:
scotch = cls.scotch_operators.get(row['SC'])
if scotch and len(row['NOCCODE']) == 4:
operator = Operator.objects.get(pk=row['NOCCODE'])
operator.name = scotch['name']
operator.address = scotch['address']
operator.url = scotch['url']
operator.email = scotch['email']
operator.phone = scotch['phone']
operator.save()
@staticmethod
def process_rows(rows):
return sorted(rows, reverse=True,
key=lambda r: (r['Duplicate'] != 'OK', r['Date Ceased']))
<commit_msg>Fix scotch operator contact import<commit_after> | """
Usage:
./manage.py import_scotch_operator_contacts < NOC_DB.csv
"""
import requests
from ..import_from_csv import ImportFromCSVCommand
from ...models import Operator
class Command(ImportFromCSVCommand):
scotch_operators = {
operator['code']: operator
for operator in requests.get('http://www.travelinescotland.com/lts/operatorList').json()['body']
}
@classmethod
def handle_row(cls, row):
if row['SC']:
scotch = cls.scotch_operators.get(row['SC'])
if scotch and len(row['NOCCODE']) == 4:
operator = Operator.objects.filter(pk=row['NOCCODE']).first()
if not operator:
return
operator.name = scotch['name']
operator.address = scotch['address']
operator.url = scotch['url']
operator.email = scotch['email']
operator.phone = scotch['phone']
operator.save()
@staticmethod
def process_rows(rows):
return sorted(rows, reverse=True,
key=lambda r: (r['Duplicate'] != 'OK', r['Date Ceased']))
| """
Usage:
./manage.py import_scotch_operator_contacts < NOC_DB.csv
"""
import requests
from ..import_from_csv import ImportFromCSVCommand
from ...models import Operator
class Command(ImportFromCSVCommand):
scotch_operators = {
operator['code']: operator
for operator in requests.get('http://www.travelinescotland.com/lts/operatorList').json()['body']
}
@classmethod
def handle_row(cls, row):
if row['SC']:
scotch = cls.scotch_operators.get(row['SC'])
if scotch and len(row['NOCCODE']) == 4:
operator = Operator.objects.get(pk=row['NOCCODE'])
operator.name = scotch['name']
operator.address = scotch['address']
operator.url = scotch['url']
operator.email = scotch['email']
operator.phone = scotch['phone']
operator.save()
@staticmethod
def process_rows(rows):
return sorted(rows, reverse=True,
key=lambda r: (r['Duplicate'] != 'OK', r['Date Ceased']))
Fix scotch operator contact import"""
Usage:
./manage.py import_scotch_operator_contacts < NOC_DB.csv
"""
import requests
from ..import_from_csv import ImportFromCSVCommand
from ...models import Operator
class Command(ImportFromCSVCommand):
scotch_operators = {
operator['code']: operator
for operator in requests.get('http://www.travelinescotland.com/lts/operatorList').json()['body']
}
@classmethod
def handle_row(cls, row):
if row['SC']:
scotch = cls.scotch_operators.get(row['SC'])
if scotch and len(row['NOCCODE']) == 4:
operator = Operator.objects.filter(pk=row['NOCCODE']).first()
if not operator:
return
operator.name = scotch['name']
operator.address = scotch['address']
operator.url = scotch['url']
operator.email = scotch['email']
operator.phone = scotch['phone']
operator.save()
@staticmethod
def process_rows(rows):
return sorted(rows, reverse=True,
key=lambda r: (r['Duplicate'] != 'OK', r['Date Ceased']))
| <commit_before>"""
Usage:
./manage.py import_scotch_operator_contacts < NOC_DB.csv
"""
import requests
from ..import_from_csv import ImportFromCSVCommand
from ...models import Operator
class Command(ImportFromCSVCommand):
scotch_operators = {
operator['code']: operator
for operator in requests.get('http://www.travelinescotland.com/lts/operatorList').json()['body']
}
@classmethod
def handle_row(cls, row):
if row['SC']:
scotch = cls.scotch_operators.get(row['SC'])
if scotch and len(row['NOCCODE']) == 4:
operator = Operator.objects.get(pk=row['NOCCODE'])
operator.name = scotch['name']
operator.address = scotch['address']
operator.url = scotch['url']
operator.email = scotch['email']
operator.phone = scotch['phone']
operator.save()
@staticmethod
def process_rows(rows):
return sorted(rows, reverse=True,
key=lambda r: (r['Duplicate'] != 'OK', r['Date Ceased']))
<commit_msg>Fix scotch operator contact import<commit_after>"""
Usage:
./manage.py import_scotch_operator_contacts < NOC_DB.csv
"""
import requests
from ..import_from_csv import ImportFromCSVCommand
from ...models import Operator
class Command(ImportFromCSVCommand):
scotch_operators = {
operator['code']: operator
for operator in requests.get('http://www.travelinescotland.com/lts/operatorList').json()['body']
}
@classmethod
def handle_row(cls, row):
if row['SC']:
scotch = cls.scotch_operators.get(row['SC'])
if scotch and len(row['NOCCODE']) == 4:
operator = Operator.objects.filter(pk=row['NOCCODE']).first()
if not operator:
return
operator.name = scotch['name']
operator.address = scotch['address']
operator.url = scotch['url']
operator.email = scotch['email']
operator.phone = scotch['phone']
operator.save()
@staticmethod
def process_rows(rows):
return sorted(rows, reverse=True,
key=lambda r: (r['Duplicate'] != 'OK', r['Date Ceased']))
|
582428262daa447d3c4cc06c1b7961fdafd96b59 | src/zeit/content/volume/tests/test_reference.py | src/zeit/content/volume/tests/test_reference.py | import zeit.cms.content.interfaces
import zeit.content.article.edit.volume
import zeit.content.volume.testing
import zope.component
class VolumeReferenceTest(zeit.content.volume.testing.FunctionalTestCase):
def setUp(self):
from zeit.content.volume.volume import Volume
super(VolumeReferenceTest, self).setUp()
self.repository['testvolume'] = Volume()
self.volume = self.repository['testvolume']
self.reference_container = zeit.content.article.edit.volume.Volume(
self.volume, self.volume.xml)
def test_volume_can_be_adapted_to_IXMLReference(self):
result = zope.component.getAdapter(
self.volume,
zeit.cms.content.interfaces.IXMLReference,
name='related')
self.assertEqual('volume', result.tag)
self.assertEqual(self.volume.uniqueId, result.get('href'))
def test_volume_can_be_adapted_to_IReference(self):
from zeit.content.volume.interfaces import IVolumeReference
result = zope.component.getMultiAdapter(
(self.reference_container, self.volume.xml),
zeit.cms.content.interfaces.IReference, name='related')
result.teaserText = 'Test teaser'
self.assertEqual(True, IVolumeReference.providedBy(result))
self.assertEqual('Test teaser', result.xml.teaserText.text)
| import lxml.objectify
import zeit.cms.content.interfaces
import zeit.content.article.edit.volume
import zeit.content.volume.testing
import zope.component
class VolumeReferenceTest(zeit.content.volume.testing.FunctionalTestCase):
def setUp(self):
from zeit.content.volume.volume import Volume
super(VolumeReferenceTest, self).setUp()
self.repository['testvolume'] = Volume()
self.volume = self.repository['testvolume']
def test_volume_can_be_adapted_to_IXMLReference(self):
result = zope.component.getAdapter(
self.volume,
zeit.cms.content.interfaces.IXMLReference,
name='related')
self.assertEqual('volume', result.tag)
self.assertEqual(self.volume.uniqueId, result.get('href'))
def test_volume_can_be_adapted_to_IReference(self):
from zeit.content.volume.interfaces import IVolumeReference
node = zope.component.getAdapter(
self.volume, zeit.cms.content.interfaces.IXMLReference,
name='related')
source = zeit.content.article.edit.volume.Volume(
None, lxml.objectify.XML('<volume/>'))
reference = zope.component.getMultiAdapter(
(source, node),
zeit.cms.content.interfaces.IReference, name='related')
reference.teaserText = 'Test teaser'
self.assertEqual(True, IVolumeReference.providedBy(reference))
self.assertEqual('Test teaser', reference.xml.teaserText.text)
| Rewrite test to how IReference actually works | BUG-633: Rewrite test to how IReference actually works
| Python | bsd-3-clause | ZeitOnline/zeit.content.volume,ZeitOnline/zeit.content.volume | import zeit.cms.content.interfaces
import zeit.content.article.edit.volume
import zeit.content.volume.testing
import zope.component
class VolumeReferenceTest(zeit.content.volume.testing.FunctionalTestCase):
def setUp(self):
from zeit.content.volume.volume import Volume
super(VolumeReferenceTest, self).setUp()
self.repository['testvolume'] = Volume()
self.volume = self.repository['testvolume']
self.reference_container = zeit.content.article.edit.volume.Volume(
self.volume, self.volume.xml)
def test_volume_can_be_adapted_to_IXMLReference(self):
result = zope.component.getAdapter(
self.volume,
zeit.cms.content.interfaces.IXMLReference,
name='related')
self.assertEqual('volume', result.tag)
self.assertEqual(self.volume.uniqueId, result.get('href'))
def test_volume_can_be_adapted_to_IReference(self):
from zeit.content.volume.interfaces import IVolumeReference
result = zope.component.getMultiAdapter(
(self.reference_container, self.volume.xml),
zeit.cms.content.interfaces.IReference, name='related')
result.teaserText = 'Test teaser'
self.assertEqual(True, IVolumeReference.providedBy(result))
self.assertEqual('Test teaser', result.xml.teaserText.text)
BUG-633: Rewrite test to how IReference actually works | import lxml.objectify
import zeit.cms.content.interfaces
import zeit.content.article.edit.volume
import zeit.content.volume.testing
import zope.component
class VolumeReferenceTest(zeit.content.volume.testing.FunctionalTestCase):
def setUp(self):
from zeit.content.volume.volume import Volume
super(VolumeReferenceTest, self).setUp()
self.repository['testvolume'] = Volume()
self.volume = self.repository['testvolume']
def test_volume_can_be_adapted_to_IXMLReference(self):
result = zope.component.getAdapter(
self.volume,
zeit.cms.content.interfaces.IXMLReference,
name='related')
self.assertEqual('volume', result.tag)
self.assertEqual(self.volume.uniqueId, result.get('href'))
def test_volume_can_be_adapted_to_IReference(self):
from zeit.content.volume.interfaces import IVolumeReference
node = zope.component.getAdapter(
self.volume, zeit.cms.content.interfaces.IXMLReference,
name='related')
source = zeit.content.article.edit.volume.Volume(
None, lxml.objectify.XML('<volume/>'))
reference = zope.component.getMultiAdapter(
(source, node),
zeit.cms.content.interfaces.IReference, name='related')
reference.teaserText = 'Test teaser'
self.assertEqual(True, IVolumeReference.providedBy(reference))
self.assertEqual('Test teaser', reference.xml.teaserText.text)
| <commit_before>import zeit.cms.content.interfaces
import zeit.content.article.edit.volume
import zeit.content.volume.testing
import zope.component
class VolumeReferenceTest(zeit.content.volume.testing.FunctionalTestCase):
def setUp(self):
from zeit.content.volume.volume import Volume
super(VolumeReferenceTest, self).setUp()
self.repository['testvolume'] = Volume()
self.volume = self.repository['testvolume']
self.reference_container = zeit.content.article.edit.volume.Volume(
self.volume, self.volume.xml)
def test_volume_can_be_adapted_to_IXMLReference(self):
result = zope.component.getAdapter(
self.volume,
zeit.cms.content.interfaces.IXMLReference,
name='related')
self.assertEqual('volume', result.tag)
self.assertEqual(self.volume.uniqueId, result.get('href'))
def test_volume_can_be_adapted_to_IReference(self):
from zeit.content.volume.interfaces import IVolumeReference
result = zope.component.getMultiAdapter(
(self.reference_container, self.volume.xml),
zeit.cms.content.interfaces.IReference, name='related')
result.teaserText = 'Test teaser'
self.assertEqual(True, IVolumeReference.providedBy(result))
self.assertEqual('Test teaser', result.xml.teaserText.text)
<commit_msg>BUG-633: Rewrite test to how IReference actually works<commit_after> | import lxml.objectify
import zeit.cms.content.interfaces
import zeit.content.article.edit.volume
import zeit.content.volume.testing
import zope.component
class VolumeReferenceTest(zeit.content.volume.testing.FunctionalTestCase):
def setUp(self):
from zeit.content.volume.volume import Volume
super(VolumeReferenceTest, self).setUp()
self.repository['testvolume'] = Volume()
self.volume = self.repository['testvolume']
def test_volume_can_be_adapted_to_IXMLReference(self):
result = zope.component.getAdapter(
self.volume,
zeit.cms.content.interfaces.IXMLReference,
name='related')
self.assertEqual('volume', result.tag)
self.assertEqual(self.volume.uniqueId, result.get('href'))
def test_volume_can_be_adapted_to_IReference(self):
from zeit.content.volume.interfaces import IVolumeReference
node = zope.component.getAdapter(
self.volume, zeit.cms.content.interfaces.IXMLReference,
name='related')
source = zeit.content.article.edit.volume.Volume(
None, lxml.objectify.XML('<volume/>'))
reference = zope.component.getMultiAdapter(
(source, node),
zeit.cms.content.interfaces.IReference, name='related')
reference.teaserText = 'Test teaser'
self.assertEqual(True, IVolumeReference.providedBy(reference))
self.assertEqual('Test teaser', reference.xml.teaserText.text)
| import zeit.cms.content.interfaces
import zeit.content.article.edit.volume
import zeit.content.volume.testing
import zope.component
class VolumeReferenceTest(zeit.content.volume.testing.FunctionalTestCase):
def setUp(self):
from zeit.content.volume.volume import Volume
super(VolumeReferenceTest, self).setUp()
self.repository['testvolume'] = Volume()
self.volume = self.repository['testvolume']
self.reference_container = zeit.content.article.edit.volume.Volume(
self.volume, self.volume.xml)
def test_volume_can_be_adapted_to_IXMLReference(self):
result = zope.component.getAdapter(
self.volume,
zeit.cms.content.interfaces.IXMLReference,
name='related')
self.assertEqual('volume', result.tag)
self.assertEqual(self.volume.uniqueId, result.get('href'))
def test_volume_can_be_adapted_to_IReference(self):
from zeit.content.volume.interfaces import IVolumeReference
result = zope.component.getMultiAdapter(
(self.reference_container, self.volume.xml),
zeit.cms.content.interfaces.IReference, name='related')
result.teaserText = 'Test teaser'
self.assertEqual(True, IVolumeReference.providedBy(result))
self.assertEqual('Test teaser', result.xml.teaserText.text)
BUG-633: Rewrite test to how IReference actually worksimport lxml.objectify
import zeit.cms.content.interfaces
import zeit.content.article.edit.volume
import zeit.content.volume.testing
import zope.component
class VolumeReferenceTest(zeit.content.volume.testing.FunctionalTestCase):
def setUp(self):
from zeit.content.volume.volume import Volume
super(VolumeReferenceTest, self).setUp()
self.repository['testvolume'] = Volume()
self.volume = self.repository['testvolume']
def test_volume_can_be_adapted_to_IXMLReference(self):
result = zope.component.getAdapter(
self.volume,
zeit.cms.content.interfaces.IXMLReference,
name='related')
self.assertEqual('volume', result.tag)
self.assertEqual(self.volume.uniqueId, result.get('href'))
def test_volume_can_be_adapted_to_IReference(self):
from zeit.content.volume.interfaces import IVolumeReference
node = zope.component.getAdapter(
self.volume, zeit.cms.content.interfaces.IXMLReference,
name='related')
source = zeit.content.article.edit.volume.Volume(
None, lxml.objectify.XML('<volume/>'))
reference = zope.component.getMultiAdapter(
(source, node),
zeit.cms.content.interfaces.IReference, name='related')
reference.teaserText = 'Test teaser'
self.assertEqual(True, IVolumeReference.providedBy(reference))
self.assertEqual('Test teaser', reference.xml.teaserText.text)
| <commit_before>import zeit.cms.content.interfaces
import zeit.content.article.edit.volume
import zeit.content.volume.testing
import zope.component
class VolumeReferenceTest(zeit.content.volume.testing.FunctionalTestCase):
def setUp(self):
from zeit.content.volume.volume import Volume
super(VolumeReferenceTest, self).setUp()
self.repository['testvolume'] = Volume()
self.volume = self.repository['testvolume']
self.reference_container = zeit.content.article.edit.volume.Volume(
self.volume, self.volume.xml)
def test_volume_can_be_adapted_to_IXMLReference(self):
result = zope.component.getAdapter(
self.volume,
zeit.cms.content.interfaces.IXMLReference,
name='related')
self.assertEqual('volume', result.tag)
self.assertEqual(self.volume.uniqueId, result.get('href'))
def test_volume_can_be_adapted_to_IReference(self):
from zeit.content.volume.interfaces import IVolumeReference
result = zope.component.getMultiAdapter(
(self.reference_container, self.volume.xml),
zeit.cms.content.interfaces.IReference, name='related')
result.teaserText = 'Test teaser'
self.assertEqual(True, IVolumeReference.providedBy(result))
self.assertEqual('Test teaser', result.xml.teaserText.text)
<commit_msg>BUG-633: Rewrite test to how IReference actually works<commit_after>import lxml.objectify
import zeit.cms.content.interfaces
import zeit.content.article.edit.volume
import zeit.content.volume.testing
import zope.component
class VolumeReferenceTest(zeit.content.volume.testing.FunctionalTestCase):
def setUp(self):
from zeit.content.volume.volume import Volume
super(VolumeReferenceTest, self).setUp()
self.repository['testvolume'] = Volume()
self.volume = self.repository['testvolume']
def test_volume_can_be_adapted_to_IXMLReference(self):
result = zope.component.getAdapter(
self.volume,
zeit.cms.content.interfaces.IXMLReference,
name='related')
self.assertEqual('volume', result.tag)
self.assertEqual(self.volume.uniqueId, result.get('href'))
def test_volume_can_be_adapted_to_IReference(self):
from zeit.content.volume.interfaces import IVolumeReference
node = zope.component.getAdapter(
self.volume, zeit.cms.content.interfaces.IXMLReference,
name='related')
source = zeit.content.article.edit.volume.Volume(
None, lxml.objectify.XML('<volume/>'))
reference = zope.component.getMultiAdapter(
(source, node),
zeit.cms.content.interfaces.IReference, name='related')
reference.teaserText = 'Test teaser'
self.assertEqual(True, IVolumeReference.providedBy(reference))
self.assertEqual('Test teaser', reference.xml.teaserText.text)
|
8be5e7f7945a47dd0cc6efb57d882f10c9686f2f | pava/demo.py | pava/demo.py | import pava
# Tell pava where it can find Java user-defined classes
pava.set_classpath(['c:/Users/laffr/PycharmProjects/pava/pava'])
try:
import java
class Out(object):
def println(self, s):
print s
java.lang.System.out = Out()
except ImportError:
pass
# Load a Java class and call a static method on it from Python
import classfiles
classfiles.HelloWorld.main()
| import os
import pava
# Tell pava where it can find Java user-defined classes
print '1. Loading Java...'
pava.set_classpath([os.path.dirname(__file__)])
try:
import java
class Out(object):
def println(self, s):
print s
java.lang.System.out = Out()
except ImportError:
pass
#
# Load the "HelloWorld" Java class and call the static "main" method on it directly from Python
#
# HelloWorld.class gets compiled to Python bytecodes as follows:
#
# Python Bytecodes generated by Pava Original Java bytecodes in HelloWorld.class
# ----------------------------------------------------------------------------------------------------------------------
# 0 LOAD_GLOBAL 0 java ### 0 getstatic ('java/lang/System', 'Ljava/io/PrintStream;', 'out')
# 3 LOAD_ATTR 1 lang
# 6 LOAD_ATTR 2 System
# 9 LOAD_ATTR 3 out
# 12 LOAD_ATTR 4 println
# 15 LOAD_CONST 1 u'Hello World!' ### 3 ldc (u'Hello World!',)
# 18 CALL_FUNCTION 1 ### 5 invokevirtual ('java/io/PrintStream', 'println', '(Ljava/lang/String;)V', 1)
# 21 POP_TOP
# 22 LOAD_CONST 2 0 ### 8 return ()
# 25 RETURN_VALUE
# ---------------------------------------------------------------------------------------------------------------------
#
print '2. Import the Python module that contains the transpiled HelloWorld:'
import classfiles
print '3. Call HelloWorld.main:'
classfiles.HelloWorld.main()
print '4. Done.'
| Make the classpath relative, not absolute and add some steps. | Make the classpath relative, not absolute and add some steps.
| Python | mit | laffra/pava,laffra/pava | import pava
# Tell pava where it can find Java user-defined classes
pava.set_classpath(['c:/Users/laffr/PycharmProjects/pava/pava'])
try:
import java
class Out(object):
def println(self, s):
print s
java.lang.System.out = Out()
except ImportError:
pass
# Load a Java class and call a static method on it from Python
import classfiles
classfiles.HelloWorld.main()
Make the classpath relative, not absolute and add some steps. | import os
import pava
# Tell pava where it can find Java user-defined classes
print '1. Loading Java...'
pava.set_classpath([os.path.dirname(__file__)])
try:
import java
class Out(object):
def println(self, s):
print s
java.lang.System.out = Out()
except ImportError:
pass
#
# Load the "HelloWorld" Java class and call the static "main" method on it directly from Python
#
# HelloWorld.class gets compiled to Python bytecodes as follows:
#
# Python Bytecodes generated by Pava Original Java bytecodes in HelloWorld.class
# ----------------------------------------------------------------------------------------------------------------------
# 0 LOAD_GLOBAL 0 java ### 0 getstatic ('java/lang/System', 'Ljava/io/PrintStream;', 'out')
# 3 LOAD_ATTR 1 lang
# 6 LOAD_ATTR 2 System
# 9 LOAD_ATTR 3 out
# 12 LOAD_ATTR 4 println
# 15 LOAD_CONST 1 u'Hello World!' ### 3 ldc (u'Hello World!',)
# 18 CALL_FUNCTION 1 ### 5 invokevirtual ('java/io/PrintStream', 'println', '(Ljava/lang/String;)V', 1)
# 21 POP_TOP
# 22 LOAD_CONST 2 0 ### 8 return ()
# 25 RETURN_VALUE
# ---------------------------------------------------------------------------------------------------------------------
#
print '2. Import the Python module that contains the transpiled HelloWorld:'
import classfiles
print '3. Call HelloWorld.main:'
classfiles.HelloWorld.main()
print '4. Done.'
| <commit_before>import pava
# Tell pava where it can find Java user-defined classes
pava.set_classpath(['c:/Users/laffr/PycharmProjects/pava/pava'])
try:
import java
class Out(object):
def println(self, s):
print s
java.lang.System.out = Out()
except ImportError:
pass
# Load a Java class and call a static method on it from Python
import classfiles
classfiles.HelloWorld.main()
<commit_msg>Make the classpath relative, not absolute and add some steps.<commit_after> | import os
import pava
# Tell pava where it can find Java user-defined classes
print '1. Loading Java...'
pava.set_classpath([os.path.dirname(__file__)])
try:
import java
class Out(object):
def println(self, s):
print s
java.lang.System.out = Out()
except ImportError:
pass
#
# Load the "HelloWorld" Java class and call the static "main" method on it directly from Python
#
# HelloWorld.class gets compiled to Python bytecodes as follows:
#
# Python Bytecodes generated by Pava Original Java bytecodes in HelloWorld.class
# ----------------------------------------------------------------------------------------------------------------------
# 0 LOAD_GLOBAL 0 java ### 0 getstatic ('java/lang/System', 'Ljava/io/PrintStream;', 'out')
# 3 LOAD_ATTR 1 lang
# 6 LOAD_ATTR 2 System
# 9 LOAD_ATTR 3 out
# 12 LOAD_ATTR 4 println
# 15 LOAD_CONST 1 u'Hello World!' ### 3 ldc (u'Hello World!',)
# 18 CALL_FUNCTION 1 ### 5 invokevirtual ('java/io/PrintStream', 'println', '(Ljava/lang/String;)V', 1)
# 21 POP_TOP
# 22 LOAD_CONST 2 0 ### 8 return ()
# 25 RETURN_VALUE
# ---------------------------------------------------------------------------------------------------------------------
#
print '2. Import the Python module that contains the transpiled HelloWorld:'
import classfiles
print '3. Call HelloWorld.main:'
classfiles.HelloWorld.main()
print '4. Done.'
| import pava
# Tell pava where it can find Java user-defined classes
pava.set_classpath(['c:/Users/laffr/PycharmProjects/pava/pava'])
try:
import java
class Out(object):
def println(self, s):
print s
java.lang.System.out = Out()
except ImportError:
pass
# Load a Java class and call a static method on it from Python
import classfiles
classfiles.HelloWorld.main()
Make the classpath relative, not absolute and add some steps.import os
import pava
# Tell pava where it can find Java user-defined classes
print '1. Loading Java...'
pava.set_classpath([os.path.dirname(__file__)])
try:
import java
class Out(object):
def println(self, s):
print s
java.lang.System.out = Out()
except ImportError:
pass
#
# Load the "HelloWorld" Java class and call the static "main" method on it directly from Python
#
# HelloWorld.class gets compiled to Python bytecodes as follows:
#
# Python Bytecodes generated by Pava Original Java bytecodes in HelloWorld.class
# ----------------------------------------------------------------------------------------------------------------------
# 0 LOAD_GLOBAL 0 java ### 0 getstatic ('java/lang/System', 'Ljava/io/PrintStream;', 'out')
# 3 LOAD_ATTR 1 lang
# 6 LOAD_ATTR 2 System
# 9 LOAD_ATTR 3 out
# 12 LOAD_ATTR 4 println
# 15 LOAD_CONST 1 u'Hello World!' ### 3 ldc (u'Hello World!',)
# 18 CALL_FUNCTION 1 ### 5 invokevirtual ('java/io/PrintStream', 'println', '(Ljava/lang/String;)V', 1)
# 21 POP_TOP
# 22 LOAD_CONST 2 0 ### 8 return ()
# 25 RETURN_VALUE
# ---------------------------------------------------------------------------------------------------------------------
#
print '2. Import the Python module that contains the transpiled HelloWorld:'
import classfiles
print '3. Call HelloWorld.main:'
classfiles.HelloWorld.main()
print '4. Done.'
| <commit_before>import pava
# Tell pava where it can find Java user-defined classes
pava.set_classpath(['c:/Users/laffr/PycharmProjects/pava/pava'])
try:
import java
class Out(object):
def println(self, s):
print s
java.lang.System.out = Out()
except ImportError:
pass
# Load a Java class and call a static method on it from Python
import classfiles
classfiles.HelloWorld.main()
<commit_msg>Make the classpath relative, not absolute and add some steps.<commit_after>import os
import pava
# Tell pava where it can find Java user-defined classes
print '1. Loading Java...'
pava.set_classpath([os.path.dirname(__file__)])
try:
import java
class Out(object):
def println(self, s):
print s
java.lang.System.out = Out()
except ImportError:
pass
#
# Load the "HelloWorld" Java class and call the static "main" method on it directly from Python
#
# HelloWorld.class gets compiled to Python bytecodes as follows:
#
# Python Bytecodes generated by Pava Original Java bytecodes in HelloWorld.class
# ----------------------------------------------------------------------------------------------------------------------
# 0 LOAD_GLOBAL 0 java ### 0 getstatic ('java/lang/System', 'Ljava/io/PrintStream;', 'out')
# 3 LOAD_ATTR 1 lang
# 6 LOAD_ATTR 2 System
# 9 LOAD_ATTR 3 out
# 12 LOAD_ATTR 4 println
# 15 LOAD_CONST 1 u'Hello World!' ### 3 ldc (u'Hello World!',)
# 18 CALL_FUNCTION 1 ### 5 invokevirtual ('java/io/PrintStream', 'println', '(Ljava/lang/String;)V', 1)
# 21 POP_TOP
# 22 LOAD_CONST 2 0 ### 8 return ()
# 25 RETURN_VALUE
# ---------------------------------------------------------------------------------------------------------------------
#
print '2. Import the Python module that contains the transpiled HelloWorld:'
import classfiles
print '3. Call HelloWorld.main:'
classfiles.HelloWorld.main()
print '4. Done.'
|
3a98e416f844bf9be93d704a8f7fb9caf3bf1723 | {{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/settings/dev.py | {{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/settings/dev.py | from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGEME!!!'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Process all tasks synchronously.
# Helpful for local development and running tests
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
try:
from .local import *
except ImportError:
pass
| from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATES[0]['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGEME!!!'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Process all tasks synchronously.
# Helpful for local development and running tests
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
try:
from .local import *
except ImportError:
pass
| Update TEMPLATE_DEBUG to Django 1.8 version | Update TEMPLATE_DEBUG to Django 1.8 version | Python | bsd-3-clause | torchbox/cookiecutter-wagtail,torchbox/cookiecutter-wagtail,torchbox/wagtail-cookiecutter,torchbox/cookiecutter-wagtail,torchbox/cookiecutter-wagtail,torchbox/wagtail-cookiecutter,torchbox/wagtail-cookiecutter,torchbox/wagtail-cookiecutter | from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGEME!!!'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Process all tasks synchronously.
# Helpful for local development and running tests
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
try:
from .local import *
except ImportError:
pass
Update TEMPLATE_DEBUG to Django 1.8 version | from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATES[0]['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGEME!!!'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Process all tasks synchronously.
# Helpful for local development and running tests
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
try:
from .local import *
except ImportError:
pass
| <commit_before>from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGEME!!!'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Process all tasks synchronously.
# Helpful for local development and running tests
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
try:
from .local import *
except ImportError:
pass
<commit_msg>Update TEMPLATE_DEBUG to Django 1.8 version<commit_after> | from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATES[0]['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGEME!!!'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Process all tasks synchronously.
# Helpful for local development and running tests
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
try:
from .local import *
except ImportError:
pass
| from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGEME!!!'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Process all tasks synchronously.
# Helpful for local development and running tests
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
try:
from .local import *
except ImportError:
pass
Update TEMPLATE_DEBUG to Django 1.8 versionfrom .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATES[0]['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGEME!!!'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Process all tasks synchronously.
# Helpful for local development and running tests
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
try:
from .local import *
except ImportError:
pass
| <commit_before>from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGEME!!!'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Process all tasks synchronously.
# Helpful for local development and running tests
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
try:
from .local import *
except ImportError:
pass
<commit_msg>Update TEMPLATE_DEBUG to Django 1.8 version<commit_after>from .base import *
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATES[0]['OPTIONS']['debug'] = True
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'CHANGEME!!!'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Process all tasks synchronously.
# Helpful for local development and running tests
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
CELERY_ALWAYS_EAGER = True
try:
from .local import *
except ImportError:
pass
|
d08973c3854d10755e156b1457972a8aaebb251b | bottle_utils/form/__init__.py | bottle_utils/form/__init__.py | """
.. module:: bottle_utils.form
:synopsis: Form processing and validation library
.. moduleauthor:: Outernet Inc <hello@outernet.is>
"""
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import Validator, Required, DateValidator, InRangeValidator
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
| """
.. module:: bottle_utils.form
:synopsis: Form processing and validation library
.. moduleauthor:: Outernet Inc <hello@outernet.is>
"""
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import (Validator, Required, DateValidator, InRangeValidator,
LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
| Include LengthValidator in list of exporeted objects | Include LengthValidator in list of exporeted objects
Signed-off-by: Branko Vukelic <26059cc39872530f89fec69552bb1050e1cc2caa@outernet.is>
| Python | bsd-2-clause | Outernet-Project/bottle-utils | """
.. module:: bottle_utils.form
:synopsis: Form processing and validation library
.. moduleauthor:: Outernet Inc <hello@outernet.is>
"""
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import Validator, Required, DateValidator, InRangeValidator
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
Include LengthValidator in list of exporeted objects
Signed-off-by: Branko Vukelic <26059cc39872530f89fec69552bb1050e1cc2caa@outernet.is> | """
.. module:: bottle_utils.form
:synopsis: Form processing and validation library
.. moduleauthor:: Outernet Inc <hello@outernet.is>
"""
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import (Validator, Required, DateValidator, InRangeValidator,
LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
| <commit_before>"""
.. module:: bottle_utils.form
:synopsis: Form processing and validation library
.. moduleauthor:: Outernet Inc <hello@outernet.is>
"""
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import Validator, Required, DateValidator, InRangeValidator
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
<commit_msg>Include LengthValidator in list of exporeted objects
Signed-off-by: Branko Vukelic <26059cc39872530f89fec69552bb1050e1cc2caa@outernet.is><commit_after> | """
.. module:: bottle_utils.form
:synopsis: Form processing and validation library
.. moduleauthor:: Outernet Inc <hello@outernet.is>
"""
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import (Validator, Required, DateValidator, InRangeValidator,
LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
| """
.. module:: bottle_utils.form
:synopsis: Form processing and validation library
.. moduleauthor:: Outernet Inc <hello@outernet.is>
"""
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import Validator, Required, DateValidator, InRangeValidator
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
Include LengthValidator in list of exporeted objects
Signed-off-by: Branko Vukelic <26059cc39872530f89fec69552bb1050e1cc2caa@outernet.is>"""
.. module:: bottle_utils.form
:synopsis: Form processing and validation library
.. moduleauthor:: Outernet Inc <hello@outernet.is>
"""
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import (Validator, Required, DateValidator, InRangeValidator,
LengthValidator)
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
| <commit_before>"""
.. module:: bottle_utils.form
:synopsis: Form processing and validation library
.. moduleauthor:: Outernet Inc <hello@outernet.is>
"""
from .exceptions import ValidationError
from .fields import (DormantField,
Field,
StringField,
PasswordField,
HiddenField,
EmailField,
TextAreaField,
DateField,
FileField,
IntegerField,
FloatField,
BooleanField,
SelectField)
from .forms import Form
from .validators import Validator, Required, DateValidator, InRangeValidator
__all__ = ['ValidationError',
'DormantField',
'Field',
'StringField',
'PasswordField',
'HiddenField',
'EmailField',
'TextAreaField',
'DateField',
'FileField',
'IntegerField',
'FloatField',
'BooleanField',
'SelectField',
'Form',
'Validator',
'Required',
'DateValidator',
'InRangeValidator']
<commit_msg>Include LengthValidator in list of exported objects
Signed-off-by: Branko Vukelic <26059cc39872530f89fec69552bb1050e1cc2caa@outernet.is><commit_after>"""
.. module:: bottle_utils.form
:synopsis: Form processing and validation library
.. moduleauthor:: Outernet Inc <hello@outernet.is>
"""
from .exceptions import ValidationError
from .fields import (DormantField,
                     Field,
                     StringField,
                     PasswordField,
                     HiddenField,
                     EmailField,
                     TextAreaField,
                     DateField,
                     FileField,
                     IntegerField,
                     FloatField,
                     BooleanField,
                     SelectField)
from .forms import Form
from .validators import (Validator, Required, DateValidator, InRangeValidator,
                         LengthValidator)
# Public names exported by `from bottle_utils.form import *`.
# LengthValidator was imported above but missing here; keep this list in
# sync with the imports.
__all__ = ['ValidationError',
           'DormantField',
           'Field',
           'StringField',
           'PasswordField',
           'HiddenField',
           'EmailField',
           'TextAreaField',
           'DateField',
           'FileField',
           'IntegerField',
           'FloatField',
           'BooleanField',
           'SelectField',
           'Form',
           'Validator',
           'Required',
           'DateValidator',
           'InRangeValidator',
           'LengthValidator']
|
039f6fa4b26b747432138a8bf9e2754c6daafec3 | byceps/blueprints/api/decorators.py | byceps/blueprints/api/decorators.py | """
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
def api_token_required(func):
    """Ensure the request is authenticated via API token.

    Wraps a view function; requests without a valid token are aborted
    with HTTP 401 before the view runs.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if not _has_valid_api_token():
            # No usable token: answer 401 and include a
            # `WWW-Authenticate: Bearer` challenge so clients know
            # which authentication scheme to use.
            www_authenticate = WWWAuthenticate('Bearer')
            abort(401, www_authenticate=www_authenticate)
        return func(*args, **kwargs)
    return wrapper
def _has_valid_api_token() -> bool:
    """Return whether the current request carries a known, non-suspended API token."""
    token_value = _extract_token_from_request()
    if token_value is None:
        return False
    stored_token = api_service.find_api_token_by_token(token_value)
    if stored_token is None:
        return False
    return not stored_token.suspended
def _extract_token_from_request() -> Optional[str]:
    """Return the bearer token from the request's Authorization header.

    Returns `None` if the header is missing or does not use the
    `Bearer` scheme, so callers treat the request as unauthenticated.
    """
    header_value = request.headers.get('Authorization')
    if header_value is None:
        return None
    # Only accept `Authorization: Bearer <token>`. The previous
    # `str.replace('Bearer ', '', 1)` approach passed headers with other
    # schemes (e.g. `Basic ...`) through unchanged as if they were tokens.
    scheme, _, token = header_value.partition(' ')
    if scheme != 'Bearer' or not token:
        return None
    return token
| """
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
from ...services.authentication.api.transfer.models import ApiToken
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
api_token = _find_valid_api_token()
if api_token is None:
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
if api_token.suspended:
www_authenticate = WWWAuthenticate('Bearer')
www_authenticate['error'] = 'invalid_token'
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _find_valid_api_token() -> Optional[ApiToken]:
request_token = _extract_token_from_request()
if request_token is None:
return None
return api_service.find_api_token_by_token(request_token)
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
| Add `invalid_token` error to `WWW-Authenticate` header if API token is suspended | Add `invalid_token` error to `WWW-Authenticate` header if API token is suspended
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | """
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
if not _has_valid_api_token():
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _has_valid_api_token() -> bool:
request_token = _extract_token_from_request()
if request_token is None:
return False
api_token = api_service.find_api_token_by_token(request_token)
return api_token is not None and not api_token.suspended
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
Add `invalid_token` error to `WWW-Authenticate` header if API token is suspended | """
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
from ...services.authentication.api.transfer.models import ApiToken
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
api_token = _find_valid_api_token()
if api_token is None:
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
if api_token.suspended:
www_authenticate = WWWAuthenticate('Bearer')
www_authenticate['error'] = 'invalid_token'
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _find_valid_api_token() -> Optional[ApiToken]:
request_token = _extract_token_from_request()
if request_token is None:
return None
return api_service.find_api_token_by_token(request_token)
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
| <commit_before>"""
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
if not _has_valid_api_token():
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _has_valid_api_token() -> bool:
request_token = _extract_token_from_request()
if request_token is None:
return False
api_token = api_service.find_api_token_by_token(request_token)
return api_token is not None and not api_token.suspended
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
<commit_msg>Add `invalid_token` error to `WWW-Authenticate` header if API token is suspended<commit_after> | """
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
from ...services.authentication.api.transfer.models import ApiToken
def api_token_required(func):
    """Ensure the request is authenticated via API token.

    Wraps a view function; requests without a valid, active token are
    aborted with HTTP 401 before the view runs.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        api_token = _find_valid_api_token()
        if api_token is None:
            # Missing or unknown token: plain 401 Bearer challenge.
            www_authenticate = WWWAuthenticate('Bearer')
            abort(401, www_authenticate=www_authenticate)
        if api_token.suspended:
            # Token exists but is suspended: still 401, with the
            # `invalid_token` error code (as defined for the Bearer
            # scheme in RFC 6750) added to the challenge.
            www_authenticate = WWWAuthenticate('Bearer')
            www_authenticate['error'] = 'invalid_token'
            abort(401, www_authenticate=www_authenticate)
        return func(*args, **kwargs)
    return wrapper
def _find_valid_api_token() -> Optional[ApiToken]:
request_token = _extract_token_from_request()
if request_token is None:
return None
return api_service.find_api_token_by_token(request_token)
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
| """
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
if not _has_valid_api_token():
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _has_valid_api_token() -> bool:
request_token = _extract_token_from_request()
if request_token is None:
return False
api_token = api_service.find_api_token_by_token(request_token)
return api_token is not None and not api_token.suspended
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
Add `invalid_token` error to `WWW-Authenticate` header if API token is suspended"""
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
from ...services.authentication.api.transfer.models import ApiToken
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
api_token = _find_valid_api_token()
if api_token is None:
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
if api_token.suspended:
www_authenticate = WWWAuthenticate('Bearer')
www_authenticate['error'] = 'invalid_token'
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _find_valid_api_token() -> Optional[ApiToken]:
request_token = _extract_token_from_request()
if request_token is None:
return None
return api_service.find_api_token_by_token(request_token)
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
| <commit_before>"""
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
if not _has_valid_api_token():
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _has_valid_api_token() -> bool:
request_token = _extract_token_from_request()
if request_token is None:
return False
api_token = api_service.find_api_token_by_token(request_token)
return api_token is not None and not api_token.suspended
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
<commit_msg>Add `invalid_token` error to `WWW-Authenticate` header if API token is suspended<commit_after>"""
byceps.blueprints.api.decorators
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from functools import wraps
from typing import Optional
from flask import abort, request
from werkzeug.datastructures import WWWAuthenticate
from ...services.authentication.api import service as api_service
from ...services.authentication.api.transfer.models import ApiToken
def api_token_required(func):
"""Ensure the request is authenticated via API token."""
@wraps(func)
def wrapper(*args, **kwargs):
api_token = _find_valid_api_token()
if api_token is None:
www_authenticate = WWWAuthenticate('Bearer')
abort(401, www_authenticate=www_authenticate)
if api_token.suspended:
www_authenticate = WWWAuthenticate('Bearer')
www_authenticate['error'] = 'invalid_token'
abort(401, www_authenticate=www_authenticate)
return func(*args, **kwargs)
return wrapper
def _find_valid_api_token() -> Optional[ApiToken]:
request_token = _extract_token_from_request()
if request_token is None:
return None
return api_service.find_api_token_by_token(request_token)
def _extract_token_from_request() -> Optional[str]:
header_value = request.headers.get('Authorization')
if header_value is None:
return None
return header_value.replace('Bearer ', '', 1)
|
ed4fea914435b934cf8f0539cfbf31ece15130b9 | trunk/bdp_fe/src/bdp_fe/jobconf/models.py | trunk/bdp_fe/src/bdp_fe/jobconf/models.py | from django.db import models
# Create your models here.
| """
Module bdp_fe.jobconf.models
Create your models here.
"""
from django.db import models
class Job(models.Model):
"""
A Job is a calculation to be run on the BDP
"""
date = models.DateTimeField('date created')
| Include a first model, Job | Include a first model, Job
| Python | apache-2.0 | telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform,telefonicaid/fiware-cosmos-platform | from django.db import models
# Create your models here.
Include a first model, Job | """
Module bdp_fe.jobconf.models
Create your models here.
"""
from django.db import models
class Job(models.Model):
"""
A Job is a calculation to be run on the BDP
"""
date = models.DateTimeField('date created')
| <commit_before>from django.db import models
# Create your models here.
<commit_msg>Include a first model, Job<commit_after> | """
Module bdp_fe.jobconf.models
Create your models here.
"""
from django.db import models
class Job(models.Model):
    """
    A Job is a calculation to be run on the BDP
    """
    # When the job record was created; 'date created' is the field's
    # human-readable (verbose) name.
    date = models.DateTimeField('date created')
| from django.db import models
# Create your models here.
Include a first model, Job"""
Module bdp_fe.jobconf.models
Create your models here.
"""
from django.db import models
class Job(models.Model):
"""
A Job is a calculation to be run on the BDP
"""
date = models.DateTimeField('date created')
| <commit_before>from django.db import models
# Create your models here.
<commit_msg>Include a first model, Job<commit_after>"""
Module bdp_fe.jobconf.models
Create your models here.
"""
from django.db import models
class Job(models.Model):
"""
A Job is a calculation to be run on the BDP
"""
date = models.DateTimeField('date created')
|
3356cd0c5c85a09107a6ba48e028a54eb5ca076c | script.py | script.py | import ast
import click
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
def cli(code, printed, remove_builtins):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
| import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
    """
    Parses a file.
    codegrapher [file_name]
    """
    # Parse the input file into an AST and walk it, collecting classes
    # and their call trees.
    parsed_code = ast.parse(code.read(), filename='code.py')
    visitor = FileVisitor()
    visitor.visit(parsed_code)
    if printed:
        # Text report: one '='-delimited section per class in the file.
        click.echo('Classes in file:')
        for class_object in visitor.classes:
            if remove_builtins:
                class_object.remove_builtins()
            click.echo('=' * 80)
            click.echo(class_object.name)
            click.echo(class_object.pprint())
            click.echo('')
    if output:
        # Graphviz report: merge every class's call tree into one graph
        # and render it to the file named by --output.
        # NOTE(review): --remove-builtins is only applied on the printed
        # path above, not before graphing -- confirm that is intended.
        graph = FunctionGrapher()
        class_names = set(cls.name for cls in visitor.classes)
        for cls in visitor.classes:
            graph.add_dict_to_graph(class_names, cls.call_tree)
        graph.add_classes_to_graph(visitor.classes)
        graph.name = output
        graph.render()
| Add graphviz file output argument | Add graphviz file output argument
| Python | mit | LaurEars/codegrapher | import ast
import click
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
def cli(code, printed, remove_builtins):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
Add graphviz file output argument | import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.render()
| <commit_before>import ast
import click
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
def cli(code, printed, remove_builtins):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
<commit_msg>Add graphviz file output argument<commit_after> | import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.render()
| import ast
import click
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
def cli(code, printed, remove_builtins):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
Add graphviz file output argumentimport ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.render()
| <commit_before>import ast
import click
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
def cli(code, printed, remove_builtins):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
<commit_msg>Add graphviz file output argument<commit_after>import ast
import click
from graphing.graph import FunctionGrapher
from parsing.parser import FileVisitor
@click.command()
@click.argument('code', type=click.File('rb'))
@click.option('--printed', default=False, is_flag=True, help='Pretty prints the call tree for each class in the file')
@click.option('--remove-builtins', default=False, is_flag=True, help='Removes builtin functions from call trees')
@click.option('--output', help='Graphviz output file name')
def cli(code, printed, remove_builtins, output):
"""
Parses a file.
codegrapher [file_name]
"""
parsed_code = ast.parse(code.read(), filename='code.py')
visitor = FileVisitor()
visitor.visit(parsed_code)
if printed:
click.echo('Classes in file:')
for class_object in visitor.classes:
if remove_builtins:
class_object.remove_builtins()
click.echo('=' * 80)
click.echo(class_object.name)
click.echo(class_object.pprint())
click.echo('')
if output:
graph = FunctionGrapher()
class_names = set(cls.name for cls in visitor.classes)
for cls in visitor.classes:
graph.add_dict_to_graph(class_names, cls.call_tree)
graph.add_classes_to_graph(visitor.classes)
graph.name = output
graph.render()
|
fd7eef57a562f2963500d34cbbeb607913b5bb21 | txircd/modules/extra/extban_registered.py | txircd/modules/extra/extban_registered.py | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
    """Implements R: extbans, which match users by registered account.

    Supported forms:
        "R:*"      -- matches any logged-in user
        "R:<nick>" -- matches the user that owns that nick, regardless of
                      whether it is their current nick
    """
    implements(IPlugin, IModuleData)
    name = "RExtbans"
    def hookIRCd(self, ircd):
        self.ircd = ircd
    def actions(self):
        return [
            ("usermatchban-R", 10, self.matchUser),
            ("user-login", 10, self.refreshUser),
            ("user-logout", 10, self.refreshUser),
        ]
    def matchUser(self, user, negated, param):
        """Return whether *user* matches the R extban parameter *param*."""
        matched = self._matches(user, param)
        return not matched if negated else matched
    def _matches(self, user, param):
        # "*" matches any user with a cached account id (i.e. logged in).
        if param == "*":
            return user.cache.get("accountid", None) is not None
        return ircLower(param) in user.cache.get("ownedNicks", [])
    def refreshUser(self, user, donorID=None):
        # Login state changed, so recompute which bans apply to the user.
        self.ircd.runActionStandard("updateuserbancache", user)
rextbans = RExtbans() | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
    """Implements R: extbans, which match users by registered account.

    Supported forms:
        "R:*"      -- matches any logged-in user
        "R:<nick>" -- matches the user that owns that nick, regardless of
                      whether it is their current nick
    """
    implements(IPlugin, IModuleData)
    name = "RExtbans"
    def hookIRCd(self, ircd):
        self.ircd = ircd
    def actions(self):
        return [
            ("usermatchban-R", 10, self.matchUser),
            ("user-login", 10, self.loginUser),
            ("user-logout", 10, self.logoutUser),
        ]
    def matchUser(self, user, negated, param):
        """Return whether *user* matches the R extban parameter *param*."""
        if negated:
            return not self.matchUser(user, False, param)
        if param == "*":
            # A user counts as logged in when an account id is cached.
            return user.cache.get("accountid", None) is not None
        return ircLower(param) in user.cache.get("ownedNicks", [])
    def loginUser(self, user, donorID=None):
        # Logging in may change which R: bans/exceptions apply.
        self.ircd.runActionStandard("updateuserbancache", user)
    def logoutUser(self, user, donorID=None):
        self.ircd.runActionStandard("updateuserbancache", user)
        # Strip the user's channel statuses everywhere, since they may
        # have been granted on the strength of the login that just ended.
        # Build and apply the mode change per channel: the original
        # accumulated (rank, nick) pairs in one list across ALL channels,
        # so one channel's status modes leaked into another channel's
        # mode-change string.
        for channel in user.channels:
            ranks = channel.users[user]
            if not ranks:
                continue
            modestr = "-{}".format("".join(ranks))
            params = [user.nick] * len(ranks)
            channel.setModes(self.ircd.serverID, modestr, params)
rextbans = RExtbans() | Remove a user's statuses in all channels when they logout | Remove a user's statuses in all channels when they logout
| Python | bsd-3-clause | Heufneutje/txircd,ElementalAlchemist/txircd | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.refreshUser),
("user-logout", 10, self.refreshUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def refreshUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
rextbans = RExtbans()Remove a user's statuses in all channels when they logout | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.loginUser),
("user-logout", 10, self.logoutUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def loginUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
def logoutUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
changes = []
for channel in user.channels:
for rank in channel.users[user]:
changes.append((rank, user.nick))
modestr = "-{}".format("".join([mode for mode, param in changes]))
params = [param for mode, param in changes if param is not None]
channel.setModes(self.ircd.serverID, modestr, params)
rextbans = RExtbans() | <commit_before>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.refreshUser),
("user-logout", 10, self.refreshUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def refreshUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
rextbans = RExtbans()<commit_msg>Remove a user's statuses in all channels when they logout<commit_after> | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.loginUser),
("user-logout", 10, self.logoutUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def loginUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
def logoutUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
changes = []
for channel in user.channels:
for rank in channel.users[user]:
changes.append((rank, user.nick))
modestr = "-{}".format("".join([mode for mode, param in changes]))
params = [param for mode, param in changes if param is not None]
channel.setModes(self.ircd.serverID, modestr, params)
rextbans = RExtbans() | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.refreshUser),
("user-logout", 10, self.refreshUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def refreshUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
rextbans = RExtbans()Remove a user's statuses in all channels when they logoutfrom twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.loginUser),
("user-logout", 10, self.logoutUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def loginUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
def logoutUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
changes = []
for channel in user.channels:
for rank in channel.users[user]:
changes.append((rank, user.nick))
modestr = "-{}".format("".join([mode for mode, param in changes]))
params = [param for mode, param in changes if param is not None]
channel.setModes(self.ircd.serverID, modestr, params)
rextbans = RExtbans() | <commit_before>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.refreshUser),
("user-logout", 10, self.refreshUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def refreshUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
rextbans = RExtbans()<commit_msg>Remove a user's statuses in all channels when they logout<commit_after>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from txircd.utils import ircLower
from zope.interface import implements
class RExtbans(ModuleData):
implements(IPlugin, IModuleData)
name = "RExtbans"
# R extbans take the following forms:
# "R:*" Match any logged in user
# "R:<nick>" Match the user that owns that nick (regardless of whether it is their current nick)
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [
("usermatchban-R", 10, self.matchUser),
("user-login", 10, self.loginUser),
("user-logout", 10, self.logoutUser),
]
def matchUser(self, user, negated, param):
if negated:
return not self.matchUser(user, False, param)
if param == "*":
return user.cache.get("accountid", None) is not None
return ircLower(param) in user.cache.get("ownedNicks", [])
def loginUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
def logoutUser(self, user, donorID=None):
self.ircd.runActionStandard("updateuserbancache", user)
changes = []
for channel in user.channels:
for rank in channel.users[user]:
changes.append((rank, user.nick))
modestr = "-{}".format("".join([mode for mode, param in changes]))
params = [param for mode, param in changes if param is not None]
channel.setModes(self.ircd.serverID, modestr, params)
rextbans = RExtbans() |
b1271aed5f6f5d465fe9d250737d5074dac9d45a | tests/integration/test_mmhint.py | tests/integration/test_mmhint.py | from __future__ import print_function, division, absolute_import
import glob
import pytest
from psautohint.autohint import ACOptions, hintFiles
from .differ import main as differ
from . import make_temp_copy, DATA_DIR
class Options(ACOptions):
def __init__(self, reference, inpaths, outpaths):
super(Options, self).__init__()
self.inputPaths = inpaths
self.outputPaths = outpaths
self.reference_font = reference
self.hintAll = True
self.verbose = False
@pytest.mark.parametrize("base", glob.glob("%s/*/*Masters" % DATA_DIR))
def test_mmufo(base, tmpdir):
paths = sorted(glob.glob(base + "/*.ufo"))
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
hintFiles(options)
for inpath, outpath in zip(inpaths, outpaths):
assert differ([inpath, outpath])
| from __future__ import print_function, division, absolute_import
import glob
import pytest
from psautohint.autohint import ACOptions, ACHintError, hintFiles
from .differ import main as differ
from . import make_temp_copy, DATA_DIR
class Options(ACOptions):
def __init__(self, reference, inpaths, outpaths):
super(Options, self).__init__()
self.inputPaths = inpaths
self.outputPaths = outpaths
self.reference_font = reference
self.hintAll = True
self.verbose = False
@pytest.mark.parametrize("base", glob.glob("%s/*/*Masters" % DATA_DIR))
def test_mmufo(base, tmpdir):
paths = sorted(glob.glob(base + "/*.ufo"))
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
hintFiles(options)
for inpath, outpath in zip(inpaths, outpaths):
assert differ([inpath, outpath])
def test_incompatible_masters(tmpdir):
base = "%s/source-serif-pro/" % DATA_DIR
paths = [base + "Light/font.ufo", base + "Black/font.ufo"]
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
with pytest.raises(ACHintError):
hintFiles(options)
| Add test for incompatible masters | Add test for incompatible masters
| Python | apache-2.0 | khaledhosny/psautohint,khaledhosny/psautohint | from __future__ import print_function, division, absolute_import
import glob
import pytest
from psautohint.autohint import ACOptions, hintFiles
from .differ import main as differ
from . import make_temp_copy, DATA_DIR
class Options(ACOptions):
def __init__(self, reference, inpaths, outpaths):
super(Options, self).__init__()
self.inputPaths = inpaths
self.outputPaths = outpaths
self.reference_font = reference
self.hintAll = True
self.verbose = False
@pytest.mark.parametrize("base", glob.glob("%s/*/*Masters" % DATA_DIR))
def test_mmufo(base, tmpdir):
paths = sorted(glob.glob(base + "/*.ufo"))
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
hintFiles(options)
for inpath, outpath in zip(inpaths, outpaths):
assert differ([inpath, outpath])
Add test for incompatible masters | from __future__ import print_function, division, absolute_import
import glob
import pytest
from psautohint.autohint import ACOptions, ACHintError, hintFiles
from .differ import main as differ
from . import make_temp_copy, DATA_DIR
class Options(ACOptions):
def __init__(self, reference, inpaths, outpaths):
super(Options, self).__init__()
self.inputPaths = inpaths
self.outputPaths = outpaths
self.reference_font = reference
self.hintAll = True
self.verbose = False
@pytest.mark.parametrize("base", glob.glob("%s/*/*Masters" % DATA_DIR))
def test_mmufo(base, tmpdir):
paths = sorted(glob.glob(base + "/*.ufo"))
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
hintFiles(options)
for inpath, outpath in zip(inpaths, outpaths):
assert differ([inpath, outpath])
def test_incompatible_masters(tmpdir):
base = "%s/source-serif-pro/" % DATA_DIR
paths = [base + "Light/font.ufo", base + "Black/font.ufo"]
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
with pytest.raises(ACHintError):
hintFiles(options)
| <commit_before>from __future__ import print_function, division, absolute_import
import glob
import pytest
from psautohint.autohint import ACOptions, hintFiles
from .differ import main as differ
from . import make_temp_copy, DATA_DIR
class Options(ACOptions):
def __init__(self, reference, inpaths, outpaths):
super(Options, self).__init__()
self.inputPaths = inpaths
self.outputPaths = outpaths
self.reference_font = reference
self.hintAll = True
self.verbose = False
@pytest.mark.parametrize("base", glob.glob("%s/*/*Masters" % DATA_DIR))
def test_mmufo(base, tmpdir):
paths = sorted(glob.glob(base + "/*.ufo"))
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
hintFiles(options)
for inpath, outpath in zip(inpaths, outpaths):
assert differ([inpath, outpath])
<commit_msg>Add test for incompatible masters<commit_after> | from __future__ import print_function, division, absolute_import
import glob
import pytest
from psautohint.autohint import ACOptions, ACHintError, hintFiles
from .differ import main as differ
from . import make_temp_copy, DATA_DIR
class Options(ACOptions):
def __init__(self, reference, inpaths, outpaths):
super(Options, self).__init__()
self.inputPaths = inpaths
self.outputPaths = outpaths
self.reference_font = reference
self.hintAll = True
self.verbose = False
@pytest.mark.parametrize("base", glob.glob("%s/*/*Masters" % DATA_DIR))
def test_mmufo(base, tmpdir):
paths = sorted(glob.glob(base + "/*.ufo"))
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
hintFiles(options)
for inpath, outpath in zip(inpaths, outpaths):
assert differ([inpath, outpath])
def test_incompatible_masters(tmpdir):
base = "%s/source-serif-pro/" % DATA_DIR
paths = [base + "Light/font.ufo", base + "Black/font.ufo"]
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
with pytest.raises(ACHintError):
hintFiles(options)
| from __future__ import print_function, division, absolute_import
import glob
import pytest
from psautohint.autohint import ACOptions, hintFiles
from .differ import main as differ
from . import make_temp_copy, DATA_DIR
class Options(ACOptions):
def __init__(self, reference, inpaths, outpaths):
super(Options, self).__init__()
self.inputPaths = inpaths
self.outputPaths = outpaths
self.reference_font = reference
self.hintAll = True
self.verbose = False
@pytest.mark.parametrize("base", glob.glob("%s/*/*Masters" % DATA_DIR))
def test_mmufo(base, tmpdir):
paths = sorted(glob.glob(base + "/*.ufo"))
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
hintFiles(options)
for inpath, outpath in zip(inpaths, outpaths):
assert differ([inpath, outpath])
Add test for incompatible mastersfrom __future__ import print_function, division, absolute_import
import glob
import pytest
from psautohint.autohint import ACOptions, ACHintError, hintFiles
from .differ import main as differ
from . import make_temp_copy, DATA_DIR
class Options(ACOptions):
def __init__(self, reference, inpaths, outpaths):
super(Options, self).__init__()
self.inputPaths = inpaths
self.outputPaths = outpaths
self.reference_font = reference
self.hintAll = True
self.verbose = False
@pytest.mark.parametrize("base", glob.glob("%s/*/*Masters" % DATA_DIR))
def test_mmufo(base, tmpdir):
paths = sorted(glob.glob(base + "/*.ufo"))
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
hintFiles(options)
for inpath, outpath in zip(inpaths, outpaths):
assert differ([inpath, outpath])
def test_incompatible_masters(tmpdir):
base = "%s/source-serif-pro/" % DATA_DIR
paths = [base + "Light/font.ufo", base + "Black/font.ufo"]
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
with pytest.raises(ACHintError):
hintFiles(options)
| <commit_before>from __future__ import print_function, division, absolute_import
import glob
import pytest
from psautohint.autohint import ACOptions, hintFiles
from .differ import main as differ
from . import make_temp_copy, DATA_DIR
class Options(ACOptions):
def __init__(self, reference, inpaths, outpaths):
super(Options, self).__init__()
self.inputPaths = inpaths
self.outputPaths = outpaths
self.reference_font = reference
self.hintAll = True
self.verbose = False
@pytest.mark.parametrize("base", glob.glob("%s/*/*Masters" % DATA_DIR))
def test_mmufo(base, tmpdir):
paths = sorted(glob.glob(base + "/*.ufo"))
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
hintFiles(options)
for inpath, outpath in zip(inpaths, outpaths):
assert differ([inpath, outpath])
<commit_msg>Add test for incompatible masters<commit_after>from __future__ import print_function, division, absolute_import
import glob
import pytest
from psautohint.autohint import ACOptions, ACHintError, hintFiles
from .differ import main as differ
from . import make_temp_copy, DATA_DIR
class Options(ACOptions):
def __init__(self, reference, inpaths, outpaths):
super(Options, self).__init__()
self.inputPaths = inpaths
self.outputPaths = outpaths
self.reference_font = reference
self.hintAll = True
self.verbose = False
@pytest.mark.parametrize("base", glob.glob("%s/*/*Masters" % DATA_DIR))
def test_mmufo(base, tmpdir):
paths = sorted(glob.glob(base + "/*.ufo"))
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
hintFiles(options)
for inpath, outpath in zip(inpaths, outpaths):
assert differ([inpath, outpath])
def test_incompatible_masters(tmpdir):
base = "%s/source-serif-pro/" % DATA_DIR
paths = [base + "Light/font.ufo", base + "Black/font.ufo"]
# the reference font is modified in-place, make a temp copy first
reference = make_temp_copy(tmpdir, paths[0])
inpaths = paths[1:]
outpaths = [str(tmpdir / p) for p in inpaths]
options = Options(reference, inpaths, outpaths)
with pytest.raises(ACHintError):
hintFiles(options)
|
0aa0d4658518417f15cb58e80c5099e22ef9b806 | app/models.py | app/models.py | from flask_login import UserMixin
import spotify
import spotipy
import db_utils
import application as app
class User(UserMixin):
''' User class for Flask-Login '''
def __init__(self, user_id, username=None):
self.id = int(user_id)
self.username = username
self._spotify = None
@property
def spotify(self):
oa_client = spotify.sp_oauth
# Fetch credentials from database
if not self._spotify:
self._spotify = db_utils.spotify_creds_for_user(app.engine,
self.id)
# No credentials exist for user
if self._spotify is None:
return None
# Refresh tokens if nescessary
if oa_client.is_token_expired(self._spotify):
self._spotify = oa_client.refresh_access_token(self._spotify)
db_utils.spotify_credentials_upsert(app.engine,
self.id,
self._spotify)
return spotipy.Spotify(auth=self._spotify)
class Playlist(object):
''' Playlist object representation '''
def __init__(self, playlist_id, title=None, duration=0, count=0):
self.id = playlist_id
self.title = title
self.duration = duration
self.count = count
| from flask_login import UserMixin
import spotify
import spotipy
import db_utils
import application as app
class User(UserMixin):
''' User class for Flask-Login '''
def __init__(self, user_id, username=None):
self.id = int(user_id)
self.username = username
self._spotify = None
@property
def spotify(self):
oa_client = spotify.sp_oauth
# Fetch credentials from database
if not self._spotify:
self._spotify = db_utils.spotify_creds_for_user(app.engine,
self.id)
# No credentials exist for user
if self._spotify is None:
return None
# Refresh tokens if nescessary
if oa_client.is_token_expired(self._spotify):
self._spotify = oa_client.refresh_access_token(self._spotify)
db_utils.spotify_credentials_upsert(app.engine,
self.id,
self._spotify)
return spotipy.Spotify(auth=self._spotify['access_token'])
class Playlist(object):
''' Playlist object representation '''
def __init__(self, playlist_id, title=None, duration=0, count=0):
self.id = playlist_id
self.title = title
self.duration = duration
self.count = count
| Fix Spotify api object creation | Fix Spotify api object creation
| Python | mit | DropMuse/DropMuse,DropMuse/DropMuse | from flask_login import UserMixin
import spotify
import spotipy
import db_utils
import application as app
class User(UserMixin):
''' User class for Flask-Login '''
def __init__(self, user_id, username=None):
self.id = int(user_id)
self.username = username
self._spotify = None
@property
def spotify(self):
oa_client = spotify.sp_oauth
# Fetch credentials from database
if not self._spotify:
self._spotify = db_utils.spotify_creds_for_user(app.engine,
self.id)
# No credentials exist for user
if self._spotify is None:
return None
# Refresh tokens if nescessary
if oa_client.is_token_expired(self._spotify):
self._spotify = oa_client.refresh_access_token(self._spotify)
db_utils.spotify_credentials_upsert(app.engine,
self.id,
self._spotify)
return spotipy.Spotify(auth=self._spotify)
class Playlist(object):
''' Playlist object representation '''
def __init__(self, playlist_id, title=None, duration=0, count=0):
self.id = playlist_id
self.title = title
self.duration = duration
self.count = count
Fix Spotify api object creation | from flask_login import UserMixin
import spotify
import spotipy
import db_utils
import application as app
class User(UserMixin):
''' User class for Flask-Login '''
def __init__(self, user_id, username=None):
self.id = int(user_id)
self.username = username
self._spotify = None
@property
def spotify(self):
oa_client = spotify.sp_oauth
# Fetch credentials from database
if not self._spotify:
self._spotify = db_utils.spotify_creds_for_user(app.engine,
self.id)
# No credentials exist for user
if self._spotify is None:
return None
# Refresh tokens if nescessary
if oa_client.is_token_expired(self._spotify):
self._spotify = oa_client.refresh_access_token(self._spotify)
db_utils.spotify_credentials_upsert(app.engine,
self.id,
self._spotify)
return spotipy.Spotify(auth=self._spotify['access_token'])
class Playlist(object):
''' Playlist object representation '''
def __init__(self, playlist_id, title=None, duration=0, count=0):
self.id = playlist_id
self.title = title
self.duration = duration
self.count = count
| <commit_before>from flask_login import UserMixin
import spotify
import spotipy
import db_utils
import application as app
class User(UserMixin):
''' User class for Flask-Login '''
def __init__(self, user_id, username=None):
self.id = int(user_id)
self.username = username
self._spotify = None
@property
def spotify(self):
oa_client = spotify.sp_oauth
# Fetch credentials from database
if not self._spotify:
self._spotify = db_utils.spotify_creds_for_user(app.engine,
self.id)
# No credentials exist for user
if self._spotify is None:
return None
# Refresh tokens if nescessary
if oa_client.is_token_expired(self._spotify):
self._spotify = oa_client.refresh_access_token(self._spotify)
db_utils.spotify_credentials_upsert(app.engine,
self.id,
self._spotify)
return spotipy.Spotify(auth=self._spotify)
class Playlist(object):
''' Playlist object representation '''
def __init__(self, playlist_id, title=None, duration=0, count=0):
self.id = playlist_id
self.title = title
self.duration = duration
self.count = count
<commit_msg>Fix Spotify api object creation<commit_after> | from flask_login import UserMixin
import spotify
import spotipy
import db_utils
import application as app
class User(UserMixin):
''' User class for Flask-Login '''
def __init__(self, user_id, username=None):
self.id = int(user_id)
self.username = username
self._spotify = None
@property
def spotify(self):
oa_client = spotify.sp_oauth
# Fetch credentials from database
if not self._spotify:
self._spotify = db_utils.spotify_creds_for_user(app.engine,
self.id)
# No credentials exist for user
if self._spotify is None:
return None
# Refresh tokens if nescessary
if oa_client.is_token_expired(self._spotify):
self._spotify = oa_client.refresh_access_token(self._spotify)
db_utils.spotify_credentials_upsert(app.engine,
self.id,
self._spotify)
return spotipy.Spotify(auth=self._spotify['access_token'])
class Playlist(object):
''' Playlist object representation '''
def __init__(self, playlist_id, title=None, duration=0, count=0):
self.id = playlist_id
self.title = title
self.duration = duration
self.count = count
| from flask_login import UserMixin
import spotify
import spotipy
import db_utils
import application as app
class User(UserMixin):
''' User class for Flask-Login '''
def __init__(self, user_id, username=None):
self.id = int(user_id)
self.username = username
self._spotify = None
@property
def spotify(self):
oa_client = spotify.sp_oauth
# Fetch credentials from database
if not self._spotify:
self._spotify = db_utils.spotify_creds_for_user(app.engine,
self.id)
# No credentials exist for user
if self._spotify is None:
return None
# Refresh tokens if nescessary
if oa_client.is_token_expired(self._spotify):
self._spotify = oa_client.refresh_access_token(self._spotify)
db_utils.spotify_credentials_upsert(app.engine,
self.id,
self._spotify)
return spotipy.Spotify(auth=self._spotify)
class Playlist(object):
''' Playlist object representation '''
def __init__(self, playlist_id, title=None, duration=0, count=0):
self.id = playlist_id
self.title = title
self.duration = duration
self.count = count
Fix Spotify api object creationfrom flask_login import UserMixin
import spotify
import spotipy
import db_utils
import application as app
class User(UserMixin):
''' User class for Flask-Login '''
def __init__(self, user_id, username=None):
self.id = int(user_id)
self.username = username
self._spotify = None
@property
def spotify(self):
oa_client = spotify.sp_oauth
# Fetch credentials from database
if not self._spotify:
self._spotify = db_utils.spotify_creds_for_user(app.engine,
self.id)
# No credentials exist for user
if self._spotify is None:
return None
# Refresh tokens if nescessary
if oa_client.is_token_expired(self._spotify):
self._spotify = oa_client.refresh_access_token(self._spotify)
db_utils.spotify_credentials_upsert(app.engine,
self.id,
self._spotify)
return spotipy.Spotify(auth=self._spotify['access_token'])
class Playlist(object):
''' Playlist object representation '''
def __init__(self, playlist_id, title=None, duration=0, count=0):
self.id = playlist_id
self.title = title
self.duration = duration
self.count = count
| <commit_before>from flask_login import UserMixin
import spotify
import spotipy
import db_utils
import application as app
class User(UserMixin):
''' User class for Flask-Login '''
def __init__(self, user_id, username=None):
self.id = int(user_id)
self.username = username
self._spotify = None
@property
def spotify(self):
oa_client = spotify.sp_oauth
# Fetch credentials from database
if not self._spotify:
self._spotify = db_utils.spotify_creds_for_user(app.engine,
self.id)
# No credentials exist for user
if self._spotify is None:
return None
# Refresh tokens if nescessary
if oa_client.is_token_expired(self._spotify):
self._spotify = oa_client.refresh_access_token(self._spotify)
db_utils.spotify_credentials_upsert(app.engine,
self.id,
self._spotify)
return spotipy.Spotify(auth=self._spotify)
class Playlist(object):
''' Playlist object representation '''
def __init__(self, playlist_id, title=None, duration=0, count=0):
self.id = playlist_id
self.title = title
self.duration = duration
self.count = count
<commit_msg>Fix Spotify api object creation<commit_after>from flask_login import UserMixin
import spotify
import spotipy
import db_utils
import application as app
class User(UserMixin):
''' User class for Flask-Login '''
def __init__(self, user_id, username=None):
self.id = int(user_id)
self.username = username
self._spotify = None
@property
def spotify(self):
oa_client = spotify.sp_oauth
# Fetch credentials from database
if not self._spotify:
self._spotify = db_utils.spotify_creds_for_user(app.engine,
self.id)
# No credentials exist for user
if self._spotify is None:
return None
# Refresh tokens if nescessary
if oa_client.is_token_expired(self._spotify):
self._spotify = oa_client.refresh_access_token(self._spotify)
db_utils.spotify_credentials_upsert(app.engine,
self.id,
self._spotify)
return spotipy.Spotify(auth=self._spotify['access_token'])
class Playlist(object):
''' Playlist object representation '''
def __init__(self, playlist_id, title=None, duration=0, count=0):
self.id = playlist_id
self.title = title
self.duration = duration
self.count = count
|
9ff59c13f0c1295e9a0acd45913f00d8c9a5c0af | mongoctl/errors.py | mongoctl/errors.py | __author__ = 'abdul'
###############################################################################
# Mongoctl Exception class
###############################################################################
class MongoctlException(Exception):
def __init__(self, message,cause=None):
self.message = message
self.cause = cause
def __str__(self):
return self.message | __author__ = 'abdul'
###############################################################################
# Mongoctl Exception class
###############################################################################
class MongoctlException(Exception):
def __init__(self, message, cause=None):
super(MongoctlException, self).__init__(message)
self._cause = cause | Remove ref to deprecated "message" property of BaseException | Remove ref to deprecated "message" property of BaseException
| Python | mit | mongolab/mongoctl | __author__ = 'abdul'
###############################################################################
# Mongoctl Exception class
###############################################################################
class MongoctlException(Exception):
def __init__(self, message,cause=None):
self.message = message
self.cause = cause
def __str__(self):
return self.messageRemove ref to deprecated "message" property of BaseException | __author__ = 'abdul'
###############################################################################
# Mongoctl Exception class
###############################################################################
class MongoctlException(Exception):
def __init__(self, message, cause=None):
super(MongoctlException, self).__init__(message)
self._cause = cause | <commit_before>__author__ = 'abdul'
###############################################################################
# Mongoctl Exception class
###############################################################################
class MongoctlException(Exception):
def __init__(self, message,cause=None):
self.message = message
self.cause = cause
def __str__(self):
return self.message<commit_msg>Remove ref to deprecated "message" property of BaseException<commit_after> | __author__ = 'abdul'
###############################################################################
# Mongoctl Exception class
###############################################################################
class MongoctlException(Exception):
def __init__(self, message, cause=None):
super(MongoctlException, self).__init__(message)
self._cause = cause | __author__ = 'abdul'
###############################################################################
# Mongoctl Exception class
###############################################################################
class MongoctlException(Exception):
def __init__(self, message,cause=None):
self.message = message
self.cause = cause
def __str__(self):
return self.messageRemove ref to deprecated "message" property of BaseException__author__ = 'abdul'
###############################################################################
# Mongoctl Exception class
###############################################################################
class MongoctlException(Exception):
def __init__(self, message, cause=None):
super(MongoctlException, self).__init__(message)
self._cause = cause | <commit_before>__author__ = 'abdul'
###############################################################################
# Mongoctl Exception class
###############################################################################
class MongoctlException(Exception):
def __init__(self, message,cause=None):
self.message = message
self.cause = cause
def __str__(self):
return self.message<commit_msg>Remove ref to deprecated "message" property of BaseException<commit_after>__author__ = 'abdul'
###############################################################################
# Mongoctl Exception class
###############################################################################
class MongoctlException(Exception):
def __init__(self, message, cause=None):
super(MongoctlException, self).__init__(message)
self._cause = cause |
3a7b1ff25c5ff3a1bd86efc7f70582c8268a968d | application.py | application.py | from flask import Flask
application = Flask(__name__)
@application.route('/')
def hello_world():
return 'Please use /api to use the DataNorth API.'
@application.route('/api')
def api_intro():
intro = \
"""
<h2> Welcome to the DataNorth API! </h2>
<h4> The following endpoints are available: </h4>
<ul>
<li>/api/crime</li>
<li>/api/energy</li>
<li>/api/housing</li>
</ul>
"""
return intro
if __name__ == "__main__":
application.debug = True
application.run() | from flask import Flask
from flask import jsonify
import boto3
import json
import decimal
from boto3.dynamodb.conditions import Key, Attr
import logging
app = Flask(__name__)
logging.basicConfig(level=logging.INFO)
# Helper class to convert a DynamoDB item to JSON.
class DecimalEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
if o % 1 > 0:
return float(o)
else:
return int(o)
return super(DecimalEncoder, self).default(o)
app.json_encoder = DecimalEncoder
dynamodb = boto3.resource(service_name='dynamodb',
region_name='us-east-1',
# endpoint_url="http://localhost:8000"
)
table = dynamodb.Table('Movies')
@app.route('/')
def hello_world():
return 'Please use /api to use the DataNorth API.'
@app.route('/api')
def api_intro():
intro = \
"""
<h2> Welcome to the DataNorth API! </h2>
<h4> The following endpoints are available: </h4>
<ul>
<li>/api/movies</li>
</ul>
"""
return intro
@app.route('/api/movies/<year>/')
def movies(year):
""" Sample movies endpoint. """
fe = Key('year').eq(int(year));
pe = "#yr, title, info.rating"
# Expression Attribute Names for Projection Expression only.
ean = { "#yr": "year", }
esk = None
response = table.scan(
FilterExpression=fe,
ProjectionExpression=pe,
ExpressionAttributeNames=ean
)
results = [i for i in response['Items']]
# for i in response['Items']:
# print(json.dumps(i, cls=DecimalEncoder))
while 'LastEvaluatedKey' in response:
response = table.scan(
ProjectionExpression=pe,
FilterExpression=fe,
ExpressionAttributeNames= ean,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for i in response['Items']:
# print(json.dumps(i, cls=DecimalEncoder))
results.append(i)
return jsonify(items=results)
if __name__ == "__main__":
app.debug = True
app.run() | Add /movies/<year> endpoint for dynamoDB | Add /movies/<year> endpoint for dynamoDB
| Python | mit | data-north/datanorth-api | from flask import Flask
application = Flask(__name__)
@application.route('/')
def hello_world():
return 'Please use /api to use the DataNorth API.'
@application.route('/api')
def api_intro():
intro = \
"""
<h2> Welcome to the DataNorth API! </h2>
<h4> The following endpoints are available: </h4>
<ul>
<li>/api/crime</li>
<li>/api/energy</li>
<li>/api/housing</li>
</ul>
"""
return intro
if __name__ == "__main__":
application.debug = True
application.run()Add /movies/<year> endpoint for dynamoDB | from flask import Flask
from flask import jsonify
import boto3
import json
import decimal
from boto3.dynamodb.conditions import Key, Attr
import logging
app = Flask(__name__)
logging.basicConfig(level=logging.INFO)
# Helper class to convert a DynamoDB item to JSON.
class DecimalEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
if o % 1 > 0:
return float(o)
else:
return int(o)
return super(DecimalEncoder, self).default(o)
app.json_encoder = DecimalEncoder
dynamodb = boto3.resource(service_name='dynamodb',
region_name='us-east-1',
# endpoint_url="http://localhost:8000"
)
table = dynamodb.Table('Movies')
@app.route('/')
def hello_world():
return 'Please use /api to use the DataNorth API.'
@app.route('/api')
def api_intro():
intro = \
"""
<h2> Welcome to the DataNorth API! </h2>
<h4> The following endpoints are available: </h4>
<ul>
<li>/api/movies</li>
</ul>
"""
return intro
@app.route('/api/movies/<year>/')
def movies(year):
""" Sample movies endpoint. """
fe = Key('year').eq(int(year));
pe = "#yr, title, info.rating"
# Expression Attribute Names for Projection Expression only.
ean = { "#yr": "year", }
esk = None
response = table.scan(
FilterExpression=fe,
ProjectionExpression=pe,
ExpressionAttributeNames=ean
)
results = [i for i in response['Items']]
# for i in response['Items']:
# print(json.dumps(i, cls=DecimalEncoder))
while 'LastEvaluatedKey' in response:
response = table.scan(
ProjectionExpression=pe,
FilterExpression=fe,
ExpressionAttributeNames= ean,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for i in response['Items']:
# print(json.dumps(i, cls=DecimalEncoder))
results.append(i)
return jsonify(items=results)
if __name__ == "__main__":
app.debug = True
app.run() | <commit_before>from flask import Flask
application = Flask(__name__)
@application.route('/')
def hello_world():
return 'Please use /api to use the DataNorth API.'
@application.route('/api')
def api_intro():
intro = \
"""
<h2> Welcome to the DataNorth API! </h2>
<h4> The following endpoints are available: </h4>
<ul>
<li>/api/crime</li>
<li>/api/energy</li>
<li>/api/housing</li>
</ul>
"""
return intro
if __name__ == "__main__":
application.debug = True
application.run()<commit_msg>Add /movies/<year> endpoint for dynamoDB<commit_after> | from flask import Flask
from flask import jsonify
import boto3
import json
import decimal
from boto3.dynamodb.conditions import Key, Attr
import logging
app = Flask(__name__)
logging.basicConfig(level=logging.INFO)
# Helper class to convert a DynamoDB item to JSON.
class DecimalEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
if o % 1 > 0:
return float(o)
else:
return int(o)
return super(DecimalEncoder, self).default(o)
app.json_encoder = DecimalEncoder
dynamodb = boto3.resource(service_name='dynamodb',
region_name='us-east-1',
# endpoint_url="http://localhost:8000"
)
table = dynamodb.Table('Movies')
@app.route('/')
def hello_world():
return 'Please use /api to use the DataNorth API.'
@app.route('/api')
def api_intro():
intro = \
"""
<h2> Welcome to the DataNorth API! </h2>
<h4> The following endpoints are available: </h4>
<ul>
<li>/api/movies</li>
</ul>
"""
return intro
@app.route('/api/movies/<year>/')
def movies(year):
""" Sample movies endpoint. """
fe = Key('year').eq(int(year));
pe = "#yr, title, info.rating"
# Expression Attribute Names for Projection Expression only.
ean = { "#yr": "year", }
esk = None
response = table.scan(
FilterExpression=fe,
ProjectionExpression=pe,
ExpressionAttributeNames=ean
)
results = [i for i in response['Items']]
# for i in response['Items']:
# print(json.dumps(i, cls=DecimalEncoder))
while 'LastEvaluatedKey' in response:
response = table.scan(
ProjectionExpression=pe,
FilterExpression=fe,
ExpressionAttributeNames= ean,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for i in response['Items']:
# print(json.dumps(i, cls=DecimalEncoder))
results.append(i)
return jsonify(items=results)
if __name__ == "__main__":
app.debug = True
app.run() | from flask import Flask
application = Flask(__name__)
@application.route('/')
def hello_world():
return 'Please use /api to use the DataNorth API.'
@application.route('/api')
def api_intro():
intro = \
"""
<h2> Welcome to the DataNorth API! </h2>
<h4> The following endpoints are available: </h4>
<ul>
<li>/api/crime</li>
<li>/api/energy</li>
<li>/api/housing</li>
</ul>
"""
return intro
if __name__ == "__main__":
application.debug = True
application.run()Add /movies/<year> endpoint for dynamoDBfrom flask import Flask
from flask import jsonify
import boto3
import json
import decimal
from boto3.dynamodb.conditions import Key, Attr
import logging
app = Flask(__name__)
logging.basicConfig(level=logging.INFO)
# Helper class to convert a DynamoDB item to JSON.
class DecimalEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
if o % 1 > 0:
return float(o)
else:
return int(o)
return super(DecimalEncoder, self).default(o)
app.json_encoder = DecimalEncoder
dynamodb = boto3.resource(service_name='dynamodb',
region_name='us-east-1',
# endpoint_url="http://localhost:8000"
)
table = dynamodb.Table('Movies')
@app.route('/')
def hello_world():
return 'Please use /api to use the DataNorth API.'
@app.route('/api')
def api_intro():
intro = \
"""
<h2> Welcome to the DataNorth API! </h2>
<h4> The following endpoints are available: </h4>
<ul>
<li>/api/movies</li>
</ul>
"""
return intro
@app.route('/api/movies/<year>/')
def movies(year):
""" Sample movies endpoint. """
fe = Key('year').eq(int(year));
pe = "#yr, title, info.rating"
# Expression Attribute Names for Projection Expression only.
ean = { "#yr": "year", }
esk = None
response = table.scan(
FilterExpression=fe,
ProjectionExpression=pe,
ExpressionAttributeNames=ean
)
results = [i for i in response['Items']]
# for i in response['Items']:
# print(json.dumps(i, cls=DecimalEncoder))
while 'LastEvaluatedKey' in response:
response = table.scan(
ProjectionExpression=pe,
FilterExpression=fe,
ExpressionAttributeNames= ean,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for i in response['Items']:
# print(json.dumps(i, cls=DecimalEncoder))
results.append(i)
return jsonify(items=results)
if __name__ == "__main__":
app.debug = True
app.run() | <commit_before>from flask import Flask
application = Flask(__name__)
@application.route('/')
def hello_world():
return 'Please use /api to use the DataNorth API.'
@application.route('/api')
def api_intro():
intro = \
"""
<h2> Welcome to the DataNorth API! </h2>
<h4> The following endpoints are available: </h4>
<ul>
<li>/api/crime</li>
<li>/api/energy</li>
<li>/api/housing</li>
</ul>
"""
return intro
if __name__ == "__main__":
application.debug = True
application.run()<commit_msg>Add /movies/<year> endpoint for dynamoDB<commit_after>from flask import Flask
from flask import jsonify
import boto3
import json
import decimal
from boto3.dynamodb.conditions import Key, Attr
import logging
app = Flask(__name__)
logging.basicConfig(level=logging.INFO)
# Helper class to convert a DynamoDB item to JSON.
class DecimalEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
if o % 1 > 0:
return float(o)
else:
return int(o)
return super(DecimalEncoder, self).default(o)
app.json_encoder = DecimalEncoder
dynamodb = boto3.resource(service_name='dynamodb',
region_name='us-east-1',
# endpoint_url="http://localhost:8000"
)
table = dynamodb.Table('Movies')
@app.route('/')
def hello_world():
return 'Please use /api to use the DataNorth API.'
@app.route('/api')
def api_intro():
intro = \
"""
<h2> Welcome to the DataNorth API! </h2>
<h4> The following endpoints are available: </h4>
<ul>
<li>/api/movies</li>
</ul>
"""
return intro
@app.route('/api/movies/<year>/')
def movies(year):
""" Sample movies endpoint. """
fe = Key('year').eq(int(year));
pe = "#yr, title, info.rating"
# Expression Attribute Names for Projection Expression only.
ean = { "#yr": "year", }
esk = None
response = table.scan(
FilterExpression=fe,
ProjectionExpression=pe,
ExpressionAttributeNames=ean
)
results = [i for i in response['Items']]
# for i in response['Items']:
# print(json.dumps(i, cls=DecimalEncoder))
while 'LastEvaluatedKey' in response:
response = table.scan(
ProjectionExpression=pe,
FilterExpression=fe,
ExpressionAttributeNames= ean,
ExclusiveStartKey=response['LastEvaluatedKey']
)
for i in response['Items']:
# print(json.dumps(i, cls=DecimalEncoder))
results.append(i)
return jsonify(items=results)
if __name__ == "__main__":
app.debug = True
app.run() |
cf1454686a9e7e00fa11d04bc4cfe443a3ad7c96 | examples/end2end/firstserver.py | examples/end2end/firstserver.py | # Topic server
import time
import flask
app = flask.Flask(__name__)
data = []
sessions = {} # session ID -> current index # FIXME No cleanup!
@app.route("/simple/newsession")
def new_session():
session_id = "session" + hex(int(time.time() * 1000))[2:]
sessions[session_id] = 0
return session_id
@app.route("/simple/push/<session_id>/<value>")
def push(session_id, value):
assert session_id in sessions
data.append(value)
return str(len(data) - 1)
@app.route("/simple/pop/<session_id>")
def pop(session_id):
assert session_id in sessions
if sessions[session_id] >= len(data):
return ""
res = data[sessions[session_id]]
sessions[session_id] += 1
return res
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080)
| # Topic server
import time
import flask
app = flask.Flask(__name__)
data = []
sessions = {} # session ID -> current index # FIXME No cleanup!
@app.route("/simple/newsession")
def new_session():
session_id = "session" + hex(int(time.time() * 1000))[2:]
sessions[session_id] = 0
return session_id
@app.route("/simple/push/<session_id>/<value>")
def push(session_id, value):
assert session_id in sessions # Or crash, aka return error
data.append(value)
return str(len(data) - 1)
@app.route("/simple/pop/<session_id>")
def pop(session_id):
assert session_id in sessions # Or crash, aka return error
res = data[sessions[session_id]] # Or crash, aka return error
sessions[session_id] += 1
return res
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080)
| Return failure on pop() from an empty queue | Return failure on pop() from an empty queue | Python | apache-2.0 | bozzzzo/quark,datawire/quark,datawire/quark,datawire/quark,datawire/datawire-connect,bozzzzo/quark,datawire/datawire-connect,datawire/datawire-connect,bozzzzo/quark,datawire/quark,bozzzzo/quark,datawire/quark,datawire/quark,datawire/datawire-connect | # Topic server
import time
import flask
app = flask.Flask(__name__)
data = []
sessions = {} # session ID -> current index # FIXME No cleanup!
@app.route("/simple/newsession")
def new_session():
session_id = "session" + hex(int(time.time() * 1000))[2:]
sessions[session_id] = 0
return session_id
@app.route("/simple/push/<session_id>/<value>")
def push(session_id, value):
assert session_id in sessions
data.append(value)
return str(len(data) - 1)
@app.route("/simple/pop/<session_id>")
def pop(session_id):
assert session_id in sessions
if sessions[session_id] >= len(data):
return ""
res = data[sessions[session_id]]
sessions[session_id] += 1
return res
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080)
Return failure on pop() from an empty queue | # Topic server
import time
import flask
app = flask.Flask(__name__)
data = []
sessions = {} # session ID -> current index # FIXME No cleanup!
@app.route("/simple/newsession")
def new_session():
session_id = "session" + hex(int(time.time() * 1000))[2:]
sessions[session_id] = 0
return session_id
@app.route("/simple/push/<session_id>/<value>")
def push(session_id, value):
assert session_id in sessions # Or crash, aka return error
data.append(value)
return str(len(data) - 1)
@app.route("/simple/pop/<session_id>")
def pop(session_id):
assert session_id in sessions # Or crash, aka return error
res = data[sessions[session_id]] # Or crash, aka return error
sessions[session_id] += 1
return res
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080)
| <commit_before># Topic server
import time
import flask
app = flask.Flask(__name__)
data = []
sessions = {} # session ID -> current index # FIXME No cleanup!
@app.route("/simple/newsession")
def new_session():
session_id = "session" + hex(int(time.time() * 1000))[2:]
sessions[session_id] = 0
return session_id
@app.route("/simple/push/<session_id>/<value>")
def push(session_id, value):
assert session_id in sessions
data.append(value)
return str(len(data) - 1)
@app.route("/simple/pop/<session_id>")
def pop(session_id):
assert session_id in sessions
if sessions[session_id] >= len(data):
return ""
res = data[sessions[session_id]]
sessions[session_id] += 1
return res
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080)
<commit_msg>Return failure on pop() from an empty queue<commit_after> | # Topic server
import time
import flask
app = flask.Flask(__name__)
data = []
sessions = {} # session ID -> current index # FIXME No cleanup!
@app.route("/simple/newsession")
def new_session():
session_id = "session" + hex(int(time.time() * 1000))[2:]
sessions[session_id] = 0
return session_id
@app.route("/simple/push/<session_id>/<value>")
def push(session_id, value):
assert session_id in sessions # Or crash, aka return error
data.append(value)
return str(len(data) - 1)
@app.route("/simple/pop/<session_id>")
def pop(session_id):
assert session_id in sessions # Or crash, aka return error
res = data[sessions[session_id]] # Or crash, aka return error
sessions[session_id] += 1
return res
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080)
| # Topic server
import time
import flask
app = flask.Flask(__name__)
data = []
sessions = {} # session ID -> current index # FIXME No cleanup!
@app.route("/simple/newsession")
def new_session():
session_id = "session" + hex(int(time.time() * 1000))[2:]
sessions[session_id] = 0
return session_id
@app.route("/simple/push/<session_id>/<value>")
def push(session_id, value):
assert session_id in sessions
data.append(value)
return str(len(data) - 1)
@app.route("/simple/pop/<session_id>")
def pop(session_id):
assert session_id in sessions
if sessions[session_id] >= len(data):
return ""
res = data[sessions[session_id]]
sessions[session_id] += 1
return res
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080)
Return failure on pop() from an empty queue# Topic server
import time
import flask
app = flask.Flask(__name__)
data = []
sessions = {} # session ID -> current index # FIXME No cleanup!
@app.route("/simple/newsession")
def new_session():
session_id = "session" + hex(int(time.time() * 1000))[2:]
sessions[session_id] = 0
return session_id
@app.route("/simple/push/<session_id>/<value>")
def push(session_id, value):
assert session_id in sessions # Or crash, aka return error
data.append(value)
return str(len(data) - 1)
@app.route("/simple/pop/<session_id>")
def pop(session_id):
assert session_id in sessions # Or crash, aka return error
res = data[sessions[session_id]] # Or crash, aka return error
sessions[session_id] += 1
return res
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080)
| <commit_before># Topic server
import time
import flask
app = flask.Flask(__name__)
data = []
sessions = {} # session ID -> current index # FIXME No cleanup!
@app.route("/simple/newsession")
def new_session():
session_id = "session" + hex(int(time.time() * 1000))[2:]
sessions[session_id] = 0
return session_id
@app.route("/simple/push/<session_id>/<value>")
def push(session_id, value):
assert session_id in sessions
data.append(value)
return str(len(data) - 1)
@app.route("/simple/pop/<session_id>")
def pop(session_id):
assert session_id in sessions
if sessions[session_id] >= len(data):
return ""
res = data[sessions[session_id]]
sessions[session_id] += 1
return res
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080)
<commit_msg>Return failure on pop() from an empty queue<commit_after># Topic server
import time
import flask
app = flask.Flask(__name__)
data = []
sessions = {} # session ID -> current index # FIXME No cleanup!
@app.route("/simple/newsession")
def new_session():
session_id = "session" + hex(int(time.time() * 1000))[2:]
sessions[session_id] = 0
return session_id
@app.route("/simple/push/<session_id>/<value>")
def push(session_id, value):
assert session_id in sessions # Or crash, aka return error
data.append(value)
return str(len(data) - 1)
@app.route("/simple/pop/<session_id>")
def pop(session_id):
assert session_id in sessions # Or crash, aka return error
res = data[sessions[session_id]] # Or crash, aka return error
sessions[session_id] += 1
return res
if __name__ == "__main__":
app.run(host="127.0.0.1", port=8080)
|
a552697d3fb830e59720276b111996e717186842 | sieve/sieve.py | sieve/sieve.py | def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
| def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
| Fix bug where n is the square of a prime | Fix bug where n is the square of a prime
| Python | agpl-3.0 | CubicComet/exercism-python-solutions | def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
Fix bug where n is the square of a prime | def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
| <commit_before>def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
<commit_msg>Fix bug where n is the square of a prime<commit_after> | def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
| def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
Fix bug where n is the square of a primedef sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
| <commit_before>def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n, i))
return prime
<commit_msg>Fix bug where n is the square of a prime<commit_after>def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
|
6acdef03da862c6daa7d2b4cc333933afb3f912a | piper/utils.py | piper/utils.py | class DotDict(object):
"""
Immutable dict-like objects accessible by dot notation
Used because the amount of configuration access is very high and just using
dots instead of the dict notation feels good.
"""
def __init__(self, data):
self.data = data
def __repr__(self): # pragma: nocover
return '<DotDict {}>'.format(self.data)
def __getattr__(self, key):
val = self.data[key]
if isinstance(val, dict):
val = DotDict(val)
return val
def values(self):
return self.data.values()
def __eq__(self, other):
return self.data == other.data
# So that we can still access as dicts
__getitem__ = __getattr__
def dynamic_load(target):
"""
Dynamically import a class and return it
This is used by the core parts of the main configuration file since
one of the main features is to let the user specify which class to use.
"""
split = target.split('.')
module_name = '.'.join(split[:-1])
class_name = split[-1]
mod = __import__(module_name, fromlist=[class_name])
return getattr(mod, class_name)
| class DotDict(object):
"""
Immutable dict-like objects accessible by dot notation
Used because the amount of configuration access is very high and just using
dots instead of the dict notation feels good.
"""
def __init__(self, data):
self.data = data
def __repr__(self): # pragma: nocover
return '<DotDict {}>'.format(self.data)
def __getattr__(self, key):
if key in ('values', 'keys', 'items'):
# Dict methods, just return and run them.
return getattr(self.data, key)
val = self.data[key]
if isinstance(val, dict):
val = DotDict(val)
return val
def __eq__(self, other):
return self.data == other.data
# So that we can still access as dicts
__getitem__ = __getattr__
def dynamic_load(target):
"""
Dynamically import a class and return it
This is used by the core parts of the main configuration file since
one of the main features is to let the user specify which class to use.
"""
split = target.split('.')
module_name = '.'.join(split[:-1])
class_name = split[-1]
mod = __import__(module_name, fromlist=[class_name])
return getattr(mod, class_name)
| Make DotDict dict methods return those objects | Make DotDict dict methods return those objects
| Python | mit | thiderman/piper | class DotDict(object):
"""
Immutable dict-like objects accessible by dot notation
Used because the amount of configuration access is very high and just using
dots instead of the dict notation feels good.
"""
def __init__(self, data):
self.data = data
def __repr__(self): # pragma: nocover
return '<DotDict {}>'.format(self.data)
def __getattr__(self, key):
val = self.data[key]
if isinstance(val, dict):
val = DotDict(val)
return val
def values(self):
return self.data.values()
def __eq__(self, other):
return self.data == other.data
# So that we can still access as dicts
__getitem__ = __getattr__
def dynamic_load(target):
"""
Dynamically import a class and return it
This is used by the core parts of the main configuration file since
one of the main features is to let the user specify which class to use.
"""
split = target.split('.')
module_name = '.'.join(split[:-1])
class_name = split[-1]
mod = __import__(module_name, fromlist=[class_name])
return getattr(mod, class_name)
Make DotDict dict methods return those objects | class DotDict(object):
"""
Immutable dict-like objects accessible by dot notation
Used because the amount of configuration access is very high and just using
dots instead of the dict notation feels good.
"""
def __init__(self, data):
self.data = data
def __repr__(self): # pragma: nocover
return '<DotDict {}>'.format(self.data)
def __getattr__(self, key):
if key in ('values', 'keys', 'items'):
# Dict methods, just return and run them.
return getattr(self.data, key)
val = self.data[key]
if isinstance(val, dict):
val = DotDict(val)
return val
def __eq__(self, other):
return self.data == other.data
# So that we can still access as dicts
__getitem__ = __getattr__
def dynamic_load(target):
"""
Dynamically import a class and return it
This is used by the core parts of the main configuration file since
one of the main features is to let the user specify which class to use.
"""
split = target.split('.')
module_name = '.'.join(split[:-1])
class_name = split[-1]
mod = __import__(module_name, fromlist=[class_name])
return getattr(mod, class_name)
| <commit_before>class DotDict(object):
"""
Immutable dict-like objects accessible by dot notation
Used because the amount of configuration access is very high and just using
dots instead of the dict notation feels good.
"""
def __init__(self, data):
self.data = data
def __repr__(self): # pragma: nocover
return '<DotDict {}>'.format(self.data)
def __getattr__(self, key):
val = self.data[key]
if isinstance(val, dict):
val = DotDict(val)
return val
def values(self):
return self.data.values()
def __eq__(self, other):
return self.data == other.data
# So that we can still access as dicts
__getitem__ = __getattr__
def dynamic_load(target):
"""
Dynamically import a class and return it
This is used by the core parts of the main configuration file since
one of the main features is to let the user specify which class to use.
"""
split = target.split('.')
module_name = '.'.join(split[:-1])
class_name = split[-1]
mod = __import__(module_name, fromlist=[class_name])
return getattr(mod, class_name)
<commit_msg>Make DotDict dict methods return those objects<commit_after> | class DotDict(object):
"""
Immutable dict-like objects accessible by dot notation
Used because the amount of configuration access is very high and just using
dots instead of the dict notation feels good.
"""
def __init__(self, data):
self.data = data
def __repr__(self): # pragma: nocover
return '<DotDict {}>'.format(self.data)
def __getattr__(self, key):
if key in ('values', 'keys', 'items'):
# Dict methods, just return and run them.
return getattr(self.data, key)
val = self.data[key]
if isinstance(val, dict):
val = DotDict(val)
return val
def __eq__(self, other):
return self.data == other.data
# So that we can still access as dicts
__getitem__ = __getattr__
def dynamic_load(target):
"""
Dynamically import a class and return it
This is used by the core parts of the main configuration file since
one of the main features is to let the user specify which class to use.
"""
split = target.split('.')
module_name = '.'.join(split[:-1])
class_name = split[-1]
mod = __import__(module_name, fromlist=[class_name])
return getattr(mod, class_name)
| class DotDict(object):
"""
Immutable dict-like objects accessible by dot notation
Used because the amount of configuration access is very high and just using
dots instead of the dict notation feels good.
"""
def __init__(self, data):
self.data = data
def __repr__(self): # pragma: nocover
return '<DotDict {}>'.format(self.data)
def __getattr__(self, key):
val = self.data[key]
if isinstance(val, dict):
val = DotDict(val)
return val
def values(self):
return self.data.values()
def __eq__(self, other):
return self.data == other.data
# So that we can still access as dicts
__getitem__ = __getattr__
def dynamic_load(target):
"""
Dynamically import a class and return it
This is used by the core parts of the main configuration file since
one of the main features is to let the user specify which class to use.
"""
split = target.split('.')
module_name = '.'.join(split[:-1])
class_name = split[-1]
mod = __import__(module_name, fromlist=[class_name])
return getattr(mod, class_name)
Make DotDict dict methods return those objectsclass DotDict(object):
"""
Immutable dict-like objects accessible by dot notation
Used because the amount of configuration access is very high and just using
dots instead of the dict notation feels good.
"""
def __init__(self, data):
self.data = data
def __repr__(self): # pragma: nocover
return '<DotDict {}>'.format(self.data)
def __getattr__(self, key):
if key in ('values', 'keys', 'items'):
# Dict methods, just return and run them.
return getattr(self.data, key)
val = self.data[key]
if isinstance(val, dict):
val = DotDict(val)
return val
def __eq__(self, other):
return self.data == other.data
# So that we can still access as dicts
__getitem__ = __getattr__
def dynamic_load(target):
"""
Dynamically import a class and return it
This is used by the core parts of the main configuration file since
one of the main features is to let the user specify which class to use.
"""
split = target.split('.')
module_name = '.'.join(split[:-1])
class_name = split[-1]
mod = __import__(module_name, fromlist=[class_name])
return getattr(mod, class_name)
| <commit_before>class DotDict(object):
"""
Immutable dict-like objects accessible by dot notation
Used because the amount of configuration access is very high and just using
dots instead of the dict notation feels good.
"""
def __init__(self, data):
self.data = data
def __repr__(self): # pragma: nocover
return '<DotDict {}>'.format(self.data)
def __getattr__(self, key):
val = self.data[key]
if isinstance(val, dict):
val = DotDict(val)
return val
def values(self):
return self.data.values()
def __eq__(self, other):
return self.data == other.data
# So that we can still access as dicts
__getitem__ = __getattr__
def dynamic_load(target):
"""
Dynamically import a class and return it
This is used by the core parts of the main configuration file since
one of the main features is to let the user specify which class to use.
"""
split = target.split('.')
module_name = '.'.join(split[:-1])
class_name = split[-1]
mod = __import__(module_name, fromlist=[class_name])
return getattr(mod, class_name)
<commit_msg>Make DotDict dict methods return those objects<commit_after>class DotDict(object):
"""
Immutable dict-like objects accessible by dot notation
Used because the amount of configuration access is very high and just using
dots instead of the dict notation feels good.
"""
def __init__(self, data):
self.data = data
def __repr__(self): # pragma: nocover
return '<DotDict {}>'.format(self.data)
def __getattr__(self, key):
if key in ('values', 'keys', 'items'):
# Dict methods, just return and run them.
return getattr(self.data, key)
val = self.data[key]
if isinstance(val, dict):
val = DotDict(val)
return val
def __eq__(self, other):
return self.data == other.data
# So that we can still access as dicts
__getitem__ = __getattr__
def dynamic_load(target):
"""
Dynamically import a class and return it
This is used by the core parts of the main configuration file since
one of the main features is to let the user specify which class to use.
"""
split = target.split('.')
module_name = '.'.join(split[:-1])
class_name = split[-1]
mod = __import__(module_name, fromlist=[class_name])
return getattr(mod, class_name)
|
17a28964785f3eb39f96d07968358b20be12e30e | marathon/exceptions.py | marathon/exceptions.py | class MarathonError(Exception):
pass
class MarathonHttpError(MarathonError):
def __init__(self, response):
"""
:param :class:`requests.Response` response: HTTP response
"""
content = response.json()
self.status_code = response.status_code
self.error_message = content.get('message')
super(MarathonHttpError, self).__init__(self.__str__())
def __repr__(self):
return 'MarathonHttpError: HTTP %s returned with message, "%s"' % \
(self.status_code, self.error_message)
def __str__(self):
return self.__repr__()
class NotFoundError(MarathonHttpError):
pass
class InternalServerError(MarathonHttpError):
pass
class InvalidChoiceError(MarathonError):
def __init__(self, param, value, options):
super(InvalidChoiceError, self).__init__(
'Invalid choice "{value}" for param "{param}". Must be one of {options}'.format(
param=param, value=value, options=options
)
)
| class MarathonError(Exception):
pass
class MarathonHttpError(MarathonError):
def __init__(self, response):
"""
:param :class:`requests.Response` response: HTTP response
"""
self.error_message = response.reason or ''
if response.content:
content = response.json()
self.error_message = content.get('message', self.error_message)
self.status_code = response.status_code
super(MarathonHttpError, self).__init__(self.__str__())
def __repr__(self):
return 'MarathonHttpError: HTTP %s returned with message, "%s"' % \
(self.status_code, self.error_message)
def __str__(self):
return self.__repr__()
class NotFoundError(MarathonHttpError):
pass
class InternalServerError(MarathonHttpError):
pass
class InvalidChoiceError(MarathonError):
def __init__(self, param, value, options):
super(InvalidChoiceError, self).__init__(
'Invalid choice "{value}" for param "{param}". Must be one of {options}'.format(
param=param, value=value, options=options
)
)
| Handle HTTP errors without content graceful | Handle HTTP errors without content graceful
HTTP errors like 503 do not have a content set by Marathon. Try to use
the response reason string as an alternative error message.
| Python | mit | thefactory/marathon-python,thefactory/marathon-python | class MarathonError(Exception):
pass
class MarathonHttpError(MarathonError):
def __init__(self, response):
"""
:param :class:`requests.Response` response: HTTP response
"""
content = response.json()
self.status_code = response.status_code
self.error_message = content.get('message')
super(MarathonHttpError, self).__init__(self.__str__())
def __repr__(self):
return 'MarathonHttpError: HTTP %s returned with message, "%s"' % \
(self.status_code, self.error_message)
def __str__(self):
return self.__repr__()
class NotFoundError(MarathonHttpError):
pass
class InternalServerError(MarathonHttpError):
pass
class InvalidChoiceError(MarathonError):
def __init__(self, param, value, options):
super(InvalidChoiceError, self).__init__(
'Invalid choice "{value}" for param "{param}". Must be one of {options}'.format(
param=param, value=value, options=options
)
)
Handle HTTP errors without content graceful
HTTP errors like 503 do not have a content set by Marathon. Try to use
the response reason string as an alternative error message. | class MarathonError(Exception):
pass
class MarathonHttpError(MarathonError):
def __init__(self, response):
"""
:param :class:`requests.Response` response: HTTP response
"""
self.error_message = response.reason or ''
if response.content:
content = response.json()
self.error_message = content.get('message', self.error_message)
self.status_code = response.status_code
super(MarathonHttpError, self).__init__(self.__str__())
def __repr__(self):
return 'MarathonHttpError: HTTP %s returned with message, "%s"' % \
(self.status_code, self.error_message)
def __str__(self):
return self.__repr__()
class NotFoundError(MarathonHttpError):
pass
class InternalServerError(MarathonHttpError):
pass
class InvalidChoiceError(MarathonError):
def __init__(self, param, value, options):
super(InvalidChoiceError, self).__init__(
'Invalid choice "{value}" for param "{param}". Must be one of {options}'.format(
param=param, value=value, options=options
)
)
| <commit_before>class MarathonError(Exception):
pass
class MarathonHttpError(MarathonError):
def __init__(self, response):
"""
:param :class:`requests.Response` response: HTTP response
"""
content = response.json()
self.status_code = response.status_code
self.error_message = content.get('message')
super(MarathonHttpError, self).__init__(self.__str__())
def __repr__(self):
return 'MarathonHttpError: HTTP %s returned with message, "%s"' % \
(self.status_code, self.error_message)
def __str__(self):
return self.__repr__()
class NotFoundError(MarathonHttpError):
pass
class InternalServerError(MarathonHttpError):
pass
class InvalidChoiceError(MarathonError):
def __init__(self, param, value, options):
super(InvalidChoiceError, self).__init__(
'Invalid choice "{value}" for param "{param}". Must be one of {options}'.format(
param=param, value=value, options=options
)
)
<commit_msg>Handle HTTP errors without content graceful
HTTP errors like 503 do not have a content set by Marathon. Try to use
the response reason string as an alternative error message.<commit_after> | class MarathonError(Exception):
pass
class MarathonHttpError(MarathonError):
def __init__(self, response):
"""
:param :class:`requests.Response` response: HTTP response
"""
self.error_message = response.reason or ''
if response.content:
content = response.json()
self.error_message = content.get('message', self.error_message)
self.status_code = response.status_code
super(MarathonHttpError, self).__init__(self.__str__())
def __repr__(self):
return 'MarathonHttpError: HTTP %s returned with message, "%s"' % \
(self.status_code, self.error_message)
def __str__(self):
return self.__repr__()
class NotFoundError(MarathonHttpError):
pass
class InternalServerError(MarathonHttpError):
pass
class InvalidChoiceError(MarathonError):
def __init__(self, param, value, options):
super(InvalidChoiceError, self).__init__(
'Invalid choice "{value}" for param "{param}". Must be one of {options}'.format(
param=param, value=value, options=options
)
)
| class MarathonError(Exception):
pass
class MarathonHttpError(MarathonError):
def __init__(self, response):
"""
:param :class:`requests.Response` response: HTTP response
"""
content = response.json()
self.status_code = response.status_code
self.error_message = content.get('message')
super(MarathonHttpError, self).__init__(self.__str__())
def __repr__(self):
return 'MarathonHttpError: HTTP %s returned with message, "%s"' % \
(self.status_code, self.error_message)
def __str__(self):
return self.__repr__()
class NotFoundError(MarathonHttpError):
pass
class InternalServerError(MarathonHttpError):
pass
class InvalidChoiceError(MarathonError):
def __init__(self, param, value, options):
super(InvalidChoiceError, self).__init__(
'Invalid choice "{value}" for param "{param}". Must be one of {options}'.format(
param=param, value=value, options=options
)
)
Handle HTTP errors without content graceful
HTTP errors like 503 do not have a content set by Marathon. Try to use
the response reason string as an alternative error message.class MarathonError(Exception):
pass
class MarathonHttpError(MarathonError):
def __init__(self, response):
"""
:param :class:`requests.Response` response: HTTP response
"""
self.error_message = response.reason or ''
if response.content:
content = response.json()
self.error_message = content.get('message', self.error_message)
self.status_code = response.status_code
super(MarathonHttpError, self).__init__(self.__str__())
def __repr__(self):
return 'MarathonHttpError: HTTP %s returned with message, "%s"' % \
(self.status_code, self.error_message)
def __str__(self):
return self.__repr__()
class NotFoundError(MarathonHttpError):
pass
class InternalServerError(MarathonHttpError):
pass
class InvalidChoiceError(MarathonError):
def __init__(self, param, value, options):
super(InvalidChoiceError, self).__init__(
'Invalid choice "{value}" for param "{param}". Must be one of {options}'.format(
param=param, value=value, options=options
)
)
| <commit_before>class MarathonError(Exception):
pass
class MarathonHttpError(MarathonError):
def __init__(self, response):
"""
:param :class:`requests.Response` response: HTTP response
"""
content = response.json()
self.status_code = response.status_code
self.error_message = content.get('message')
super(MarathonHttpError, self).__init__(self.__str__())
def __repr__(self):
return 'MarathonHttpError: HTTP %s returned with message, "%s"' % \
(self.status_code, self.error_message)
def __str__(self):
return self.__repr__()
class NotFoundError(MarathonHttpError):
pass
class InternalServerError(MarathonHttpError):
pass
class InvalidChoiceError(MarathonError):
def __init__(self, param, value, options):
super(InvalidChoiceError, self).__init__(
'Invalid choice "{value}" for param "{param}". Must be one of {options}'.format(
param=param, value=value, options=options
)
)
<commit_msg>Handle HTTP errors without content graceful
HTTP errors like 503 do not have a content set by Marathon. Try to use
the response reason string as an alternative error message.<commit_after>class MarathonError(Exception):
pass
class MarathonHttpError(MarathonError):
def __init__(self, response):
"""
:param :class:`requests.Response` response: HTTP response
"""
self.error_message = response.reason or ''
if response.content:
content = response.json()
self.error_message = content.get('message', self.error_message)
self.status_code = response.status_code
super(MarathonHttpError, self).__init__(self.__str__())
def __repr__(self):
return 'MarathonHttpError: HTTP %s returned with message, "%s"' % \
(self.status_code, self.error_message)
def __str__(self):
return self.__repr__()
class NotFoundError(MarathonHttpError):
pass
class InternalServerError(MarathonHttpError):
pass
class InvalidChoiceError(MarathonError):
def __init__(self, param, value, options):
super(InvalidChoiceError, self).__init__(
'Invalid choice "{value}" for param "{param}". Must be one of {options}'.format(
param=param, value=value, options=options
)
)
|
dabc1f4a869f8da5106248dcf860c75d1fe9f538 | geotrek/common/management/commands/update_permissions.py | geotrek/common/management/commands/update_permissions.py | import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
if issubclass(content_type.model_class(), BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
| import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
model = content_type.model_class()
if model and issubclass(model, BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
| Fix update_permission command for legacy content types | Fix update_permission command for legacy content types
| Python | bsd-2-clause | johan--/Geotrek,GeotrekCE/Geotrek-admin,mabhub/Geotrek,Anaethelion/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,johan--/Geotrek,makinacorpus/Geotrek,mabhub/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin | import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
if issubclass(content_type.model_class(), BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
Fix update_permission command for legacy content types | import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
model = content_type.model_class()
if model and issubclass(model, BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
| <commit_before>import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
if issubclass(content_type.model_class(), BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
<commit_msg>Fix update_permission command for legacy content types<commit_after> | import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
model = content_type.model_class()
if model and issubclass(model, BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
| import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
if issubclass(content_type.model_class(), BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
Fix update_permission command for legacy content typesimport logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
model = content_type.model_class()
if model and issubclass(model, BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
| <commit_before>import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
if issubclass(content_type.model_class(), BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
<commit_msg>Fix update_permission command for legacy content types<commit_after>import logging
from django.conf import settings
from django.utils.importlib import import_module
from django.db.models import get_apps
from django.contrib.auth.management import create_permissions
from django.contrib.auth.models import Permission
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from mapentity import registry
from mapentity.registry import create_mapentity_model_permissions
from geotrek.common.mixins import BasePublishableMixin
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Create models permissions"
def execute(self, *args, **options):
logger.info("Synchronize django permissions")
for app in get_apps():
create_permissions(app, [], int(options.get('verbosity', 1)))
logger.info("Done.")
logger.info("Synchronize mapentity permissions")
# Make sure apps are registered at this point
import_module(settings.ROOT_URLCONF)
# For all models registered, add missing bits
for model in registry.registry.keys():
create_mapentity_model_permissions(model)
logger.info("Done.")
logger.info("Synchronize geotrek permissions")
for content_type in ContentType.objects.all():
model = content_type.model_class()
if model and issubclass(model, BasePublishableMixin):
Permission.objects.get_or_create(
codename='publish_%s' % content_type.model,
name='Can publish %s' % content_type.name,
content_type=content_type)
logger.info("Done.")
|
60bdc3cb6d503e675f029a6d2bbf4941267a2087 | pysswords/__main__.py | pysswords/__main__.py | import os
import argparse
def default_db():
return os.path.join(os.path.expanduser("~"), "~/.pysswords")
def parse_args(args):
parser = argparse.ArgumentParser(prog="Pysswords")
group_init = parser.add_argument_group("Init options")
group_init.add_argument("-I", "--init", action="store_true")
group_init.add_argument("-D", "--database", default=default_db())
group_cred = parser.add_argument_group("Credential options")
group_cred.add_argument("-a", "--add", action="store_true")
group_cred.add_argument("-g", "--get")
group_cred.add_argument("-u", "--update")
group_cred.add_argument("-r", "--remove")
group_cred.add_argument("-s", "--search")
return parser.parse_args(args)
| import os
import argparse
def default_db():
return os.path.join(os.path.expanduser("~"), "~/.pysswords")
def parse_args(args):
parser = argparse.ArgumentParser(prog="Pysswords")
group_db = parser.add_argument_group("Databse options")
group_db.add_argument("-I", "--init", action="store_true")
group_db.add_argument("-D", "--database", default=default_db())
group_cred = parser.add_argument_group("Credential options")
group_cred.add_argument("-a", "--add", action="store_true")
group_cred.add_argument("-g", "--get")
group_cred.add_argument("-u", "--update")
group_cred.add_argument("-r", "--remove")
group_cred.add_argument("-s", "--search")
return parser.parse_args(args)
| Refactor parse args db options | Refactor parse args db options
| Python | mit | scorphus/passpie,eiginn/passpie,marcwebbie/passpie,marcwebbie/pysswords,eiginn/passpie,scorphus/passpie,marcwebbie/passpie | import os
import argparse
def default_db():
return os.path.join(os.path.expanduser("~"), "~/.pysswords")
def parse_args(args):
parser = argparse.ArgumentParser(prog="Pysswords")
group_init = parser.add_argument_group("Init options")
group_init.add_argument("-I", "--init", action="store_true")
group_init.add_argument("-D", "--database", default=default_db())
group_cred = parser.add_argument_group("Credential options")
group_cred.add_argument("-a", "--add", action="store_true")
group_cred.add_argument("-g", "--get")
group_cred.add_argument("-u", "--update")
group_cred.add_argument("-r", "--remove")
group_cred.add_argument("-s", "--search")
return parser.parse_args(args)
Refactor parse args db options | import os
import argparse
def default_db():
return os.path.join(os.path.expanduser("~"), "~/.pysswords")
def parse_args(args):
parser = argparse.ArgumentParser(prog="Pysswords")
group_db = parser.add_argument_group("Databse options")
group_db.add_argument("-I", "--init", action="store_true")
group_db.add_argument("-D", "--database", default=default_db())
group_cred = parser.add_argument_group("Credential options")
group_cred.add_argument("-a", "--add", action="store_true")
group_cred.add_argument("-g", "--get")
group_cred.add_argument("-u", "--update")
group_cred.add_argument("-r", "--remove")
group_cred.add_argument("-s", "--search")
return parser.parse_args(args)
| <commit_before>import os
import argparse
def default_db():
return os.path.join(os.path.expanduser("~"), "~/.pysswords")
def parse_args(args):
parser = argparse.ArgumentParser(prog="Pysswords")
group_init = parser.add_argument_group("Init options")
group_init.add_argument("-I", "--init", action="store_true")
group_init.add_argument("-D", "--database", default=default_db())
group_cred = parser.add_argument_group("Credential options")
group_cred.add_argument("-a", "--add", action="store_true")
group_cred.add_argument("-g", "--get")
group_cred.add_argument("-u", "--update")
group_cred.add_argument("-r", "--remove")
group_cred.add_argument("-s", "--search")
return parser.parse_args(args)
<commit_msg>Refactor parse args db options<commit_after> | import os
import argparse
def default_db():
return os.path.join(os.path.expanduser("~"), "~/.pysswords")
def parse_args(args):
parser = argparse.ArgumentParser(prog="Pysswords")
group_db = parser.add_argument_group("Databse options")
group_db.add_argument("-I", "--init", action="store_true")
group_db.add_argument("-D", "--database", default=default_db())
group_cred = parser.add_argument_group("Credential options")
group_cred.add_argument("-a", "--add", action="store_true")
group_cred.add_argument("-g", "--get")
group_cred.add_argument("-u", "--update")
group_cred.add_argument("-r", "--remove")
group_cred.add_argument("-s", "--search")
return parser.parse_args(args)
| import os
import argparse
def default_db():
return os.path.join(os.path.expanduser("~"), "~/.pysswords")
def parse_args(args):
parser = argparse.ArgumentParser(prog="Pysswords")
group_init = parser.add_argument_group("Init options")
group_init.add_argument("-I", "--init", action="store_true")
group_init.add_argument("-D", "--database", default=default_db())
group_cred = parser.add_argument_group("Credential options")
group_cred.add_argument("-a", "--add", action="store_true")
group_cred.add_argument("-g", "--get")
group_cred.add_argument("-u", "--update")
group_cred.add_argument("-r", "--remove")
group_cred.add_argument("-s", "--search")
return parser.parse_args(args)
Refactor parse args db optionsimport os
import argparse
def default_db():
return os.path.join(os.path.expanduser("~"), "~/.pysswords")
def parse_args(args):
parser = argparse.ArgumentParser(prog="Pysswords")
group_db = parser.add_argument_group("Databse options")
group_db.add_argument("-I", "--init", action="store_true")
group_db.add_argument("-D", "--database", default=default_db())
group_cred = parser.add_argument_group("Credential options")
group_cred.add_argument("-a", "--add", action="store_true")
group_cred.add_argument("-g", "--get")
group_cred.add_argument("-u", "--update")
group_cred.add_argument("-r", "--remove")
group_cred.add_argument("-s", "--search")
return parser.parse_args(args)
| <commit_before>import os
import argparse
def default_db():
return os.path.join(os.path.expanduser("~"), "~/.pysswords")
def parse_args(args):
parser = argparse.ArgumentParser(prog="Pysswords")
group_init = parser.add_argument_group("Init options")
group_init.add_argument("-I", "--init", action="store_true")
group_init.add_argument("-D", "--database", default=default_db())
group_cred = parser.add_argument_group("Credential options")
group_cred.add_argument("-a", "--add", action="store_true")
group_cred.add_argument("-g", "--get")
group_cred.add_argument("-u", "--update")
group_cred.add_argument("-r", "--remove")
group_cred.add_argument("-s", "--search")
return parser.parse_args(args)
<commit_msg>Refactor parse args db options<commit_after>import os
import argparse
def default_db():
return os.path.join(os.path.expanduser("~"), "~/.pysswords")
def parse_args(args):
parser = argparse.ArgumentParser(prog="Pysswords")
group_db = parser.add_argument_group("Databse options")
group_db.add_argument("-I", "--init", action="store_true")
group_db.add_argument("-D", "--database", default=default_db())
group_cred = parser.add_argument_group("Credential options")
group_cred.add_argument("-a", "--add", action="store_true")
group_cred.add_argument("-g", "--get")
group_cred.add_argument("-u", "--update")
group_cred.add_argument("-r", "--remove")
group_cred.add_argument("-s", "--search")
return parser.parse_args(args)
|
b371ec9e8d1fc15c2d3e1093b305b4c8e0944694 | corehq/apps/locations/middleware.py | corehq/apps/locations/middleware.py | from .permissions import is_location_safe, location_restricted_response
class LocationAccessMiddleware(object):
"""
Many large projects want to restrict data access by location.
Views which handle that properly are called "location safe". This
middleware blocks access to any non location safe features by users who
have such a restriction. If these users do not have an assigned location,
they cannot access anything.
"""
def process_view(self, request, view_fn, view_args, view_kwargs):
user = getattr(request, 'couch_user', None)
domain = getattr(request, 'domain', None)
if not domain or not user or not user.is_member_of(domain):
# This is probably some non-domain page or a test, let normal auth handle it
request.can_access_all_locations = True
elif user.has_permission(domain, 'access_all_locations'):
request.can_access_all_locations = True
else:
request.can_access_all_locations = False
if (
not is_location_safe(view_fn, view_args, view_kwargs)
or not user.get_sql_location(domain)
):
return location_restricted_response(request)
| from .permissions import is_location_safe, location_restricted_response
class LocationAccessMiddleware(object):
"""
Many large projects want to restrict data access by location.
Views which handle that properly are called "location safe". This
middleware blocks access to any non location safe features by users who
have such a restriction. If these users do not have an assigned location,
they cannot access anything.
This middleware also sets the property can_access_all_locations. This
property does not imply domain level authentication or access to any
particular feature. All it says is that whether or not the user has a role
which restricts their data access by location.
"""
def process_view(self, request, view_fn, view_args, view_kwargs):
user = getattr(request, 'couch_user', None)
domain = getattr(request, 'domain', None)
if not domain or not user or not user.is_member_of(domain):
# This is probably some non-domain page or a test, let normal auth handle it
request.can_access_all_locations = True
elif user.has_permission(domain, 'access_all_locations'):
request.can_access_all_locations = True
else:
request.can_access_all_locations = False
if (
not is_location_safe(view_fn, view_args, view_kwargs)
or not user.get_sql_location(domain)
):
return location_restricted_response(request)
| Clarify usage in docstring | Clarify usage in docstring [ci skip]
| Python | bsd-3-clause | qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | from .permissions import is_location_safe, location_restricted_response
class LocationAccessMiddleware(object):
"""
Many large projects want to restrict data access by location.
Views which handle that properly are called "location safe". This
middleware blocks access to any non location safe features by users who
have such a restriction. If these users do not have an assigned location,
they cannot access anything.
"""
def process_view(self, request, view_fn, view_args, view_kwargs):
user = getattr(request, 'couch_user', None)
domain = getattr(request, 'domain', None)
if not domain or not user or not user.is_member_of(domain):
# This is probably some non-domain page or a test, let normal auth handle it
request.can_access_all_locations = True
elif user.has_permission(domain, 'access_all_locations'):
request.can_access_all_locations = True
else:
request.can_access_all_locations = False
if (
not is_location_safe(view_fn, view_args, view_kwargs)
or not user.get_sql_location(domain)
):
return location_restricted_response(request)
Clarify usage in docstring [ci skip] | from .permissions import is_location_safe, location_restricted_response
class LocationAccessMiddleware(object):
"""
Many large projects want to restrict data access by location.
Views which handle that properly are called "location safe". This
middleware blocks access to any non location safe features by users who
have such a restriction. If these users do not have an assigned location,
they cannot access anything.
This middleware also sets the property can_access_all_locations. This
property does not imply domain level authentication or access to any
particular feature. All it says is that whether or not the user has a role
which restricts their data access by location.
"""
def process_view(self, request, view_fn, view_args, view_kwargs):
user = getattr(request, 'couch_user', None)
domain = getattr(request, 'domain', None)
if not domain or not user or not user.is_member_of(domain):
# This is probably some non-domain page or a test, let normal auth handle it
request.can_access_all_locations = True
elif user.has_permission(domain, 'access_all_locations'):
request.can_access_all_locations = True
else:
request.can_access_all_locations = False
if (
not is_location_safe(view_fn, view_args, view_kwargs)
or not user.get_sql_location(domain)
):
return location_restricted_response(request)
| <commit_before>from .permissions import is_location_safe, location_restricted_response
class LocationAccessMiddleware(object):
"""
Many large projects want to restrict data access by location.
Views which handle that properly are called "location safe". This
middleware blocks access to any non location safe features by users who
have such a restriction. If these users do not have an assigned location,
they cannot access anything.
"""
def process_view(self, request, view_fn, view_args, view_kwargs):
user = getattr(request, 'couch_user', None)
domain = getattr(request, 'domain', None)
if not domain or not user or not user.is_member_of(domain):
# This is probably some non-domain page or a test, let normal auth handle it
request.can_access_all_locations = True
elif user.has_permission(domain, 'access_all_locations'):
request.can_access_all_locations = True
else:
request.can_access_all_locations = False
if (
not is_location_safe(view_fn, view_args, view_kwargs)
or not user.get_sql_location(domain)
):
return location_restricted_response(request)
<commit_msg>Clarify usage in docstring [ci skip]<commit_after> | from .permissions import is_location_safe, location_restricted_response
class LocationAccessMiddleware(object):
"""
Many large projects want to restrict data access by location.
Views which handle that properly are called "location safe". This
middleware blocks access to any non location safe features by users who
have such a restriction. If these users do not have an assigned location,
they cannot access anything.
This middleware also sets the property can_access_all_locations. This
property does not imply domain level authentication or access to any
particular feature. All it says is that whether or not the user has a role
which restricts their data access by location.
"""
def process_view(self, request, view_fn, view_args, view_kwargs):
user = getattr(request, 'couch_user', None)
domain = getattr(request, 'domain', None)
if not domain or not user or not user.is_member_of(domain):
# This is probably some non-domain page or a test, let normal auth handle it
request.can_access_all_locations = True
elif user.has_permission(domain, 'access_all_locations'):
request.can_access_all_locations = True
else:
request.can_access_all_locations = False
if (
not is_location_safe(view_fn, view_args, view_kwargs)
or not user.get_sql_location(domain)
):
return location_restricted_response(request)
| from .permissions import is_location_safe, location_restricted_response
class LocationAccessMiddleware(object):
"""
Many large projects want to restrict data access by location.
Views which handle that properly are called "location safe". This
middleware blocks access to any non location safe features by users who
have such a restriction. If these users do not have an assigned location,
they cannot access anything.
"""
def process_view(self, request, view_fn, view_args, view_kwargs):
user = getattr(request, 'couch_user', None)
domain = getattr(request, 'domain', None)
if not domain or not user or not user.is_member_of(domain):
# This is probably some non-domain page or a test, let normal auth handle it
request.can_access_all_locations = True
elif user.has_permission(domain, 'access_all_locations'):
request.can_access_all_locations = True
else:
request.can_access_all_locations = False
if (
not is_location_safe(view_fn, view_args, view_kwargs)
or not user.get_sql_location(domain)
):
return location_restricted_response(request)
Clarify usage in docstring [ci skip]from .permissions import is_location_safe, location_restricted_response
class LocationAccessMiddleware(object):
"""
Many large projects want to restrict data access by location.
Views which handle that properly are called "location safe". This
middleware blocks access to any non location safe features by users who
have such a restriction. If these users do not have an assigned location,
they cannot access anything.
This middleware also sets the property can_access_all_locations. This
property does not imply domain level authentication or access to any
particular feature. All it says is that whether or not the user has a role
which restricts their data access by location.
"""
def process_view(self, request, view_fn, view_args, view_kwargs):
user = getattr(request, 'couch_user', None)
domain = getattr(request, 'domain', None)
if not domain or not user or not user.is_member_of(domain):
# This is probably some non-domain page or a test, let normal auth handle it
request.can_access_all_locations = True
elif user.has_permission(domain, 'access_all_locations'):
request.can_access_all_locations = True
else:
request.can_access_all_locations = False
if (
not is_location_safe(view_fn, view_args, view_kwargs)
or not user.get_sql_location(domain)
):
return location_restricted_response(request)
| <commit_before>from .permissions import is_location_safe, location_restricted_response
class LocationAccessMiddleware(object):
"""
Many large projects want to restrict data access by location.
Views which handle that properly are called "location safe". This
middleware blocks access to any non location safe features by users who
have such a restriction. If these users do not have an assigned location,
they cannot access anything.
"""
def process_view(self, request, view_fn, view_args, view_kwargs):
user = getattr(request, 'couch_user', None)
domain = getattr(request, 'domain', None)
if not domain or not user or not user.is_member_of(domain):
# This is probably some non-domain page or a test, let normal auth handle it
request.can_access_all_locations = True
elif user.has_permission(domain, 'access_all_locations'):
request.can_access_all_locations = True
else:
request.can_access_all_locations = False
if (
not is_location_safe(view_fn, view_args, view_kwargs)
or not user.get_sql_location(domain)
):
return location_restricted_response(request)
<commit_msg>Clarify usage in docstring [ci skip]<commit_after>from .permissions import is_location_safe, location_restricted_response
class LocationAccessMiddleware(object):
"""
Many large projects want to restrict data access by location.
Views which handle that properly are called "location safe". This
middleware blocks access to any non location safe features by users who
have such a restriction. If these users do not have an assigned location,
they cannot access anything.
This middleware also sets the property can_access_all_locations. This
property does not imply domain level authentication or access to any
particular feature. All it says is that whether or not the user has a role
which restricts their data access by location.
"""
def process_view(self, request, view_fn, view_args, view_kwargs):
user = getattr(request, 'couch_user', None)
domain = getattr(request, 'domain', None)
if not domain or not user or not user.is_member_of(domain):
# This is probably some non-domain page or a test, let normal auth handle it
request.can_access_all_locations = True
elif user.has_permission(domain, 'access_all_locations'):
request.can_access_all_locations = True
else:
request.can_access_all_locations = False
if (
not is_location_safe(view_fn, view_args, view_kwargs)
or not user.get_sql_location(domain)
):
return location_restricted_response(request)
|
2c56f31d3f730d530a0d00f8e18788671e6934b8 | kazoo/tests/test_security.py | kazoo/tests/test_security.py | import unittest
from nose.tools import eq_
import zookeeper
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl['perms'] & zookeeper.PERM_READ, zookeeper.PERM_READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", write=True, create=True,
delete=True, admin=True)
for perm in [zookeeper.PERM_WRITE, zookeeper.PERM_CREATE,
zookeeper.PERM_DELETE, zookeeper.PERM_ADMIN]:
eq_(acl['perms'] & perm, perm)
| import unittest
from nose.tools import eq_
import zookeeper
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl.perms & zookeeper.PERM_READ, zookeeper.PERM_READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", write=True, create=True,
delete=True, admin=True)
for perm in [zookeeper.PERM_WRITE, zookeeper.PERM_CREATE,
zookeeper.PERM_DELETE, zookeeper.PERM_ADMIN]:
eq_(acl.perms & perm, perm)
| Read perms off the ACL object properly | Read perms off the ACL object properly
| Python | apache-2.0 | rockerbox/kazoo,Asana/kazoo,bsanders/kazoo,rgs1/kazoo,python-zk/kazoo,rockerbox/kazoo,AlexanderplUs/kazoo,max0d41/kazoo,pombredanne/kazoo,max0d41/kazoo,harlowja/kazoo,kormat/kazoo,rgs1/kazoo,pombredanne/kazoo,tempbottle/kazoo,harlowja/kazoo,jacksontj/kazoo,bsanders/kazoo,rackerlabs/kazoo,tempbottle/kazoo,kormat/kazoo,python-zk/kazoo,rackerlabs/kazoo,AlexanderplUs/kazoo,jacksontj/kazoo | import unittest
from nose.tools import eq_
import zookeeper
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl['perms'] & zookeeper.PERM_READ, zookeeper.PERM_READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", write=True, create=True,
delete=True, admin=True)
for perm in [zookeeper.PERM_WRITE, zookeeper.PERM_CREATE,
zookeeper.PERM_DELETE, zookeeper.PERM_ADMIN]:
eq_(acl['perms'] & perm, perm)
Read perms off the ACL object properly | import unittest
from nose.tools import eq_
import zookeeper
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl.perms & zookeeper.PERM_READ, zookeeper.PERM_READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", write=True, create=True,
delete=True, admin=True)
for perm in [zookeeper.PERM_WRITE, zookeeper.PERM_CREATE,
zookeeper.PERM_DELETE, zookeeper.PERM_ADMIN]:
eq_(acl.perms & perm, perm)
| <commit_before>import unittest
from nose.tools import eq_
import zookeeper
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl['perms'] & zookeeper.PERM_READ, zookeeper.PERM_READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", write=True, create=True,
delete=True, admin=True)
for perm in [zookeeper.PERM_WRITE, zookeeper.PERM_CREATE,
zookeeper.PERM_DELETE, zookeeper.PERM_ADMIN]:
eq_(acl['perms'] & perm, perm)
<commit_msg>Read perms off the ACL object properly<commit_after> | import unittest
from nose.tools import eq_
import zookeeper
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl.perms & zookeeper.PERM_READ, zookeeper.PERM_READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", write=True, create=True,
delete=True, admin=True)
for perm in [zookeeper.PERM_WRITE, zookeeper.PERM_CREATE,
zookeeper.PERM_DELETE, zookeeper.PERM_ADMIN]:
eq_(acl.perms & perm, perm)
| import unittest
from nose.tools import eq_
import zookeeper
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl['perms'] & zookeeper.PERM_READ, zookeeper.PERM_READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", write=True, create=True,
delete=True, admin=True)
for perm in [zookeeper.PERM_WRITE, zookeeper.PERM_CREATE,
zookeeper.PERM_DELETE, zookeeper.PERM_ADMIN]:
eq_(acl['perms'] & perm, perm)
Read perms off the ACL object properlyimport unittest
from nose.tools import eq_
import zookeeper
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl.perms & zookeeper.PERM_READ, zookeeper.PERM_READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", write=True, create=True,
delete=True, admin=True)
for perm in [zookeeper.PERM_WRITE, zookeeper.PERM_CREATE,
zookeeper.PERM_DELETE, zookeeper.PERM_ADMIN]:
eq_(acl.perms & perm, perm)
| <commit_before>import unittest
from nose.tools import eq_
import zookeeper
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl['perms'] & zookeeper.PERM_READ, zookeeper.PERM_READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", write=True, create=True,
delete=True, admin=True)
for perm in [zookeeper.PERM_WRITE, zookeeper.PERM_CREATE,
zookeeper.PERM_DELETE, zookeeper.PERM_ADMIN]:
eq_(acl['perms'] & perm, perm)
<commit_msg>Read perms off the ACL object properly<commit_after>import unittest
from nose.tools import eq_
import zookeeper
class TestACL(unittest.TestCase):
def _makeOne(self, *args, **kwargs):
from kazoo.security import make_acl
return make_acl(*args, **kwargs)
def test_read_acl(self):
acl = self._makeOne("digest", ":", read=True)
eq_(acl.perms & zookeeper.PERM_READ, zookeeper.PERM_READ)
def test_all_perms(self):
acl = self._makeOne("digest", ":", write=True, create=True,
delete=True, admin=True)
for perm in [zookeeper.PERM_WRITE, zookeeper.PERM_CREATE,
zookeeper.PERM_DELETE, zookeeper.PERM_ADMIN]:
eq_(acl.perms & perm, perm)
|
b7ea2db86ad67410330d412a8733cb4dab2c4109 | partner_academic_title/models/partner_academic_title.py | partner_academic_title/models/partner_academic_title.py | # -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of partner_academic_title,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# partner_academic_title is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# partner_academic_title is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with partner_academic_title.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class PartnerAcademicTitle(models.Model):
_name = 'partner.academic.title'
name = fields.Char(required=True)
sequence = fields.Integer(required=True,
help="""defines the order to display titles""")
active = fields.Boolean(default=True)
| # -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of partner_academic_title,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# partner_academic_title is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# partner_academic_title is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with partner_academic_title.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class PartnerAcademicTitle(models.Model):
_name = 'partner.academic.title'
name = fields.Char(required=True, translate=True)
sequence = fields.Integer(required=True,
help="""defines the order to display titles""")
active = fields.Boolean(default=True)
| Add translate=True on academic title name | Add translate=True on academic title name
| Python | agpl-3.0 | sergiocorato/partner-contact | # -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of partner_academic_title,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# partner_academic_title is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# partner_academic_title is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with partner_academic_title.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class PartnerAcademicTitle(models.Model):
_name = 'partner.academic.title'
name = fields.Char(required=True)
sequence = fields.Integer(required=True,
help="""defines the order to display titles""")
active = fields.Boolean(default=True)
Add translate=True on academic title name | # -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of partner_academic_title,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# partner_academic_title is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# partner_academic_title is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with partner_academic_title.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class PartnerAcademicTitle(models.Model):
_name = 'partner.academic.title'
name = fields.Char(required=True, translate=True)
sequence = fields.Integer(required=True,
help="""defines the order to display titles""")
active = fields.Boolean(default=True)
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of partner_academic_title,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# partner_academic_title is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# partner_academic_title is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with partner_academic_title.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class PartnerAcademicTitle(models.Model):
_name = 'partner.academic.title'
name = fields.Char(required=True)
sequence = fields.Integer(required=True,
help="""defines the order to display titles""")
active = fields.Boolean(default=True)
<commit_msg>Add translate=True on academic title name<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of partner_academic_title,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# partner_academic_title is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# partner_academic_title is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with partner_academic_title.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class PartnerAcademicTitle(models.Model):
_name = 'partner.academic.title'
name = fields.Char(required=True, translate=True)
sequence = fields.Integer(required=True,
help="""defines the order to display titles""")
active = fields.Boolean(default=True)
| # -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of partner_academic_title,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# partner_academic_title is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# partner_academic_title is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with partner_academic_title.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class PartnerAcademicTitle(models.Model):
_name = 'partner.academic.title'
name = fields.Char(required=True)
sequence = fields.Integer(required=True,
help="""defines the order to display titles""")
active = fields.Boolean(default=True)
Add translate=True on academic title name# -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of partner_academic_title,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# partner_academic_title is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# partner_academic_title is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with partner_academic_title.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class PartnerAcademicTitle(models.Model):
_name = 'partner.academic.title'
name = fields.Char(required=True, translate=True)
sequence = fields.Integer(required=True,
help="""defines the order to display titles""")
active = fields.Boolean(default=True)
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of partner_academic_title,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# partner_academic_title is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# partner_academic_title is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with partner_academic_title.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class PartnerAcademicTitle(models.Model):
_name = 'partner.academic.title'
name = fields.Char(required=True)
sequence = fields.Integer(required=True,
help="""defines the order to display titles""")
active = fields.Boolean(default=True)
<commit_msg>Add translate=True on academic title name<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of partner_academic_title,
# an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# partner_academic_title is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# partner_academic_title is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with partner_academic_title.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields
class PartnerAcademicTitle(models.Model):
_name = 'partner.academic.title'
name = fields.Char(required=True, translate=True)
sequence = fields.Integer(required=True,
help="""defines the order to display titles""")
active = fields.Boolean(default=True)
|
bbf3e1bbb8ccd7f0408b24dd7575588f6567c807 | gastosabertos/receita/models.py | gastosabertos/receita/models.py | # -*- coding: utf-8 -*-
from sqlalchemy import Column, types
from ..extensions import db
class Revenue(db.Model):
__tablename__ = 'revenue'
id = Column(db.Integer, primary_key=True)
code = Column(db.String(), nullable=False)
description = Column(db.String(), nullable=False)
date = Column(db.Date())
monthly_predicted = Column(db.DECIMAL(19,2))
monthly_outcome = Column(db.DECIMAL(19,2))
economical_category = Column(db.Integer)
economical_subcategory = Column(db.Integer)
source = Column(db.Integer)
rubric = Column(db.Integer)
paragraph = Column(db.Integer)
subparagraph = Column(db.Integer)
| # -*- coding: utf-8 -*-
from sqlalchemy import Column, types
from ..extensions import db
class Revenue(db.Model):
__tablename__ = 'revenue'
id = Column(db.Integer, primary_key=True)
code = Column(db.String(30), nullable=False)
description = Column(db.Text(), nullable=False)
date = Column(db.Date())
monthly_predicted = Column(db.DECIMAL(19,2))
monthly_outcome = Column(db.DECIMAL(19,2))
economical_category = Column(db.Integer)
economical_subcategory = Column(db.Integer)
source = Column(db.Integer)
rubric = Column(db.Integer)
paragraph = Column(db.Integer)
subparagraph = Column(db.Integer)
| Change Revenu model column types for 'code' column and 'description' for MySQL | Change Revenu model column types for 'code' column and 'description' for MySQL
| Python | agpl-3.0 | nucleo-digital/gastos_abertos,andresmrm/gastos_abertos,LuizArmesto/gastos_abertos,LuizArmesto/gastos_abertos,okfn-brasil/gastos_abertos,andresmrm/gastos_abertos,okfn-brasil/gastos_abertos | # -*- coding: utf-8 -*-
from sqlalchemy import Column, types
from ..extensions import db
class Revenue(db.Model):
__tablename__ = 'revenue'
id = Column(db.Integer, primary_key=True)
code = Column(db.String(), nullable=False)
description = Column(db.String(), nullable=False)
date = Column(db.Date())
monthly_predicted = Column(db.DECIMAL(19,2))
monthly_outcome = Column(db.DECIMAL(19,2))
economical_category = Column(db.Integer)
economical_subcategory = Column(db.Integer)
source = Column(db.Integer)
rubric = Column(db.Integer)
paragraph = Column(db.Integer)
subparagraph = Column(db.Integer)
Change Revenu model column types for 'code' column and 'description' for MySQL | # -*- coding: utf-8 -*-
from sqlalchemy import Column, types
from ..extensions import db
class Revenue(db.Model):
__tablename__ = 'revenue'
id = Column(db.Integer, primary_key=True)
code = Column(db.String(30), nullable=False)
description = Column(db.Text(), nullable=False)
date = Column(db.Date())
monthly_predicted = Column(db.DECIMAL(19,2))
monthly_outcome = Column(db.DECIMAL(19,2))
economical_category = Column(db.Integer)
economical_subcategory = Column(db.Integer)
source = Column(db.Integer)
rubric = Column(db.Integer)
paragraph = Column(db.Integer)
subparagraph = Column(db.Integer)
| <commit_before># -*- coding: utf-8 -*-
from sqlalchemy import Column, types
from ..extensions import db
class Revenue(db.Model):
__tablename__ = 'revenue'
id = Column(db.Integer, primary_key=True)
code = Column(db.String(), nullable=False)
description = Column(db.String(), nullable=False)
date = Column(db.Date())
monthly_predicted = Column(db.DECIMAL(19,2))
monthly_outcome = Column(db.DECIMAL(19,2))
economical_category = Column(db.Integer)
economical_subcategory = Column(db.Integer)
source = Column(db.Integer)
rubric = Column(db.Integer)
paragraph = Column(db.Integer)
subparagraph = Column(db.Integer)
<commit_msg>Change Revenu model column types for 'code' column and 'description' for MySQL<commit_after> | # -*- coding: utf-8 -*-
from sqlalchemy import Column, types
from ..extensions import db
class Revenue(db.Model):
__tablename__ = 'revenue'
id = Column(db.Integer, primary_key=True)
code = Column(db.String(30), nullable=False)
description = Column(db.Text(), nullable=False)
date = Column(db.Date())
monthly_predicted = Column(db.DECIMAL(19,2))
monthly_outcome = Column(db.DECIMAL(19,2))
economical_category = Column(db.Integer)
economical_subcategory = Column(db.Integer)
source = Column(db.Integer)
rubric = Column(db.Integer)
paragraph = Column(db.Integer)
subparagraph = Column(db.Integer)
| # -*- coding: utf-8 -*-
from sqlalchemy import Column, types
from ..extensions import db
class Revenue(db.Model):
__tablename__ = 'revenue'
id = Column(db.Integer, primary_key=True)
code = Column(db.String(), nullable=False)
description = Column(db.String(), nullable=False)
date = Column(db.Date())
monthly_predicted = Column(db.DECIMAL(19,2))
monthly_outcome = Column(db.DECIMAL(19,2))
economical_category = Column(db.Integer)
economical_subcategory = Column(db.Integer)
source = Column(db.Integer)
rubric = Column(db.Integer)
paragraph = Column(db.Integer)
subparagraph = Column(db.Integer)
Change Revenu model column types for 'code' column and 'description' for MySQL# -*- coding: utf-8 -*-
from sqlalchemy import Column, types
from ..extensions import db
class Revenue(db.Model):
__tablename__ = 'revenue'
id = Column(db.Integer, primary_key=True)
code = Column(db.String(30), nullable=False)
description = Column(db.Text(), nullable=False)
date = Column(db.Date())
monthly_predicted = Column(db.DECIMAL(19,2))
monthly_outcome = Column(db.DECIMAL(19,2))
economical_category = Column(db.Integer)
economical_subcategory = Column(db.Integer)
source = Column(db.Integer)
rubric = Column(db.Integer)
paragraph = Column(db.Integer)
subparagraph = Column(db.Integer)
| <commit_before># -*- coding: utf-8 -*-
from sqlalchemy import Column, types
from ..extensions import db
class Revenue(db.Model):
__tablename__ = 'revenue'
id = Column(db.Integer, primary_key=True)
code = Column(db.String(), nullable=False)
description = Column(db.String(), nullable=False)
date = Column(db.Date())
monthly_predicted = Column(db.DECIMAL(19,2))
monthly_outcome = Column(db.DECIMAL(19,2))
economical_category = Column(db.Integer)
economical_subcategory = Column(db.Integer)
source = Column(db.Integer)
rubric = Column(db.Integer)
paragraph = Column(db.Integer)
subparagraph = Column(db.Integer)
<commit_msg>Change Revenu model column types for 'code' column and 'description' for MySQL<commit_after># -*- coding: utf-8 -*-
from sqlalchemy import Column, types
from ..extensions import db
class Revenue(db.Model):
__tablename__ = 'revenue'
id = Column(db.Integer, primary_key=True)
code = Column(db.String(30), nullable=False)
description = Column(db.Text(), nullable=False)
date = Column(db.Date())
monthly_predicted = Column(db.DECIMAL(19,2))
monthly_outcome = Column(db.DECIMAL(19,2))
economical_category = Column(db.Integer)
economical_subcategory = Column(db.Integer)
source = Column(db.Integer)
rubric = Column(db.Integer)
paragraph = Column(db.Integer)
subparagraph = Column(db.Integer)
|
675b7acc6d04c6f3764f1fd148afd0a2b2134d7e | civictechprojects/sitemaps.py | civictechprojects/sitemaps.py | from common.helpers.constants import FrontEndSection
from django.conf import settings
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = settings.SITE_LAST_UPDATED
sections = [FrontEndSection.AboutUs, FrontEndSection.FindProjects, FrontEndSection.PartnerWithUs, FrontEndSection.Donate,
FrontEndSection.Press, FrontEndSection.ContactUs]
def items(self):
return self.sections
def location(self, section):
return '/index/?section=' + str(section.value)
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
| from common.helpers.constants import FrontEndSection
from django.conf import settings
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = settings.SITE_LAST_UPDATED
pages = [
str(FrontEndSection.AboutUs.value),
str(FrontEndSection.FindProjects.value),
str(FrontEndSection.FindProjects.value) + '&showSplash=1',
str(FrontEndSection.PartnerWithUs.value),
str(FrontEndSection.Donate.value),
str(FrontEndSection.Press.value),
str(FrontEndSection.ContactUs.value)
]
def items(self):
return self.pages
def location(self, page):
return '/index/?section=' + page
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
| Add splash page to sitemap | Add splash page to sitemap
| Python | mit | DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange,DemocracyLab/CivicTechExchange | from common.helpers.constants import FrontEndSection
from django.conf import settings
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = settings.SITE_LAST_UPDATED
sections = [FrontEndSection.AboutUs, FrontEndSection.FindProjects, FrontEndSection.PartnerWithUs, FrontEndSection.Donate,
FrontEndSection.Press, FrontEndSection.ContactUs]
def items(self):
return self.sections
def location(self, section):
return '/index/?section=' + str(section.value)
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
Add splash page to sitemap | from common.helpers.constants import FrontEndSection
from django.conf import settings
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = settings.SITE_LAST_UPDATED
pages = [
str(FrontEndSection.AboutUs.value),
str(FrontEndSection.FindProjects.value),
str(FrontEndSection.FindProjects.value) + '&showSplash=1',
str(FrontEndSection.PartnerWithUs.value),
str(FrontEndSection.Donate.value),
str(FrontEndSection.Press.value),
str(FrontEndSection.ContactUs.value)
]
def items(self):
return self.pages
def location(self, page):
return '/index/?section=' + page
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
| <commit_before>from common.helpers.constants import FrontEndSection
from django.conf import settings
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = settings.SITE_LAST_UPDATED
sections = [FrontEndSection.AboutUs, FrontEndSection.FindProjects, FrontEndSection.PartnerWithUs, FrontEndSection.Donate,
FrontEndSection.Press, FrontEndSection.ContactUs]
def items(self):
return self.sections
def location(self, section):
return '/index/?section=' + str(section.value)
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
<commit_msg>Add splash page to sitemap<commit_after> | from common.helpers.constants import FrontEndSection
from django.conf import settings
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = settings.SITE_LAST_UPDATED
pages = [
str(FrontEndSection.AboutUs.value),
str(FrontEndSection.FindProjects.value),
str(FrontEndSection.FindProjects.value) + '&showSplash=1',
str(FrontEndSection.PartnerWithUs.value),
str(FrontEndSection.Donate.value),
str(FrontEndSection.Press.value),
str(FrontEndSection.ContactUs.value)
]
def items(self):
return self.pages
def location(self, page):
return '/index/?section=' + page
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
| from common.helpers.constants import FrontEndSection
from django.conf import settings
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = settings.SITE_LAST_UPDATED
sections = [FrontEndSection.AboutUs, FrontEndSection.FindProjects, FrontEndSection.PartnerWithUs, FrontEndSection.Donate,
FrontEndSection.Press, FrontEndSection.ContactUs]
def items(self):
return self.sections
def location(self, section):
return '/index/?section=' + str(section.value)
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
Add splash page to sitemapfrom common.helpers.constants import FrontEndSection
from django.conf import settings
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = settings.SITE_LAST_UPDATED
pages = [
str(FrontEndSection.AboutUs.value),
str(FrontEndSection.FindProjects.value),
str(FrontEndSection.FindProjects.value) + '&showSplash=1',
str(FrontEndSection.PartnerWithUs.value),
str(FrontEndSection.Donate.value),
str(FrontEndSection.Press.value),
str(FrontEndSection.ContactUs.value)
]
def items(self):
return self.pages
def location(self, page):
return '/index/?section=' + page
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
| <commit_before>from common.helpers.constants import FrontEndSection
from django.conf import settings
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = settings.SITE_LAST_UPDATED
sections = [FrontEndSection.AboutUs, FrontEndSection.FindProjects, FrontEndSection.PartnerWithUs, FrontEndSection.Donate,
FrontEndSection.Press, FrontEndSection.ContactUs]
def items(self):
return self.sections
def location(self, section):
return '/index/?section=' + str(section.value)
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
<commit_msg>Add splash page to sitemap<commit_after>from common.helpers.constants import FrontEndSection
from django.conf import settings
from django.contrib.sitemaps import Sitemap
from .models import Project
from datetime import date
class SectionSitemap(Sitemap):
protocol = "https"
changefreq = "monthly"
priority = 0.5
# TODO: Update this date for each release
lastmod = settings.SITE_LAST_UPDATED
pages = [
str(FrontEndSection.AboutUs.value),
str(FrontEndSection.FindProjects.value),
str(FrontEndSection.FindProjects.value) + '&showSplash=1',
str(FrontEndSection.PartnerWithUs.value),
str(FrontEndSection.Donate.value),
str(FrontEndSection.Press.value),
str(FrontEndSection.ContactUs.value)
]
def items(self):
return self.pages
def location(self, page):
return '/index/?section=' + page
class ProjectSitemap(Sitemap):
protocol = "https"
changefreq = "daily"
priority = 0.5
def items(self):
return Project.objects.filter(is_searchable=True).order_by('id')
def location(self, project):
return '/index/?section=AboutProject&id=' + str(project.id)
def lastmod(self, project):
return project.project_date_modified
|
364cb2307021cc11de5a31f577e12a5f3e1f6bf6 | openpathsampling/engines/toy/snapshot.py | openpathsampling/engines/toy/snapshot.py | """
@author: JD Chodera
@author: JH Prinz
"""
from openpathsampling.engines import BaseSnapshot, SnapshotFactory
import openpathsampling.engines.features as feats
from . import features as toy_feats
@feats.attach_features([
toy_feats.velocities,
toy_feats.coordinates,
toy_feats.instantaneous_temperature,
toy_feats.engine
])
class ToySnapshot(BaseSnapshot):
"""
Simulation snapshot. Only references to coordinates and velocities
"""
@property
def topology(self):
return self.engine.topology
@property
def masses(self):
return self.topology.masses
# The following code does almost the same as above
# ToySnapshot = SnapshotFactory(
# name='ToySnapshot',
# features=[
# features.velocities,
# features.coordinates,
# features.engine
# ],
# description="Simulation snapshot. Only references to coordinates and "
# "velocities",
# base_class=BaseSnapshot
# )
| """
@author: JD Chodera
@author: JH Prinz
"""
from openpathsampling.engines import BaseSnapshot, SnapshotFactory
from openpathsampling.engines import features as feats
from . import features as toy_feats
@feats.attach_features([
toy_feats.velocities,
toy_feats.coordinates,
toy_feats.instantaneous_temperature,
toy_feats.engine
])
class ToySnapshot(BaseSnapshot):
"""
Simulation snapshot. Only references to coordinates and velocities
"""
@property
def topology(self):
return self.engine.topology
@property
def masses(self):
return self.topology.masses
# The following code does almost the same as above
# ToySnapshot = SnapshotFactory(
# name='ToySnapshot',
# features=[
# features.velocities,
# features.coordinates,
# features.engine
# ],
# description="Simulation snapshot. Only references to coordinates and "
# "velocities",
# base_class=BaseSnapshot
# )
| Fix for bad merge decision | Fix for bad merge decision
| Python | mit | openpathsampling/openpathsampling,dwhswenson/openpathsampling,choderalab/openpathsampling,dwhswenson/openpathsampling,choderalab/openpathsampling,dwhswenson/openpathsampling,dwhswenson/openpathsampling,openpathsampling/openpathsampling,openpathsampling/openpathsampling,openpathsampling/openpathsampling,choderalab/openpathsampling | """
@author: JD Chodera
@author: JH Prinz
"""
from openpathsampling.engines import BaseSnapshot, SnapshotFactory
import openpathsampling.engines.features as feats
from . import features as toy_feats
@feats.attach_features([
toy_feats.velocities,
toy_feats.coordinates,
toy_feats.instantaneous_temperature,
toy_feats.engine
])
class ToySnapshot(BaseSnapshot):
"""
Simulation snapshot. Only references to coordinates and velocities
"""
@property
def topology(self):
return self.engine.topology
@property
def masses(self):
return self.topology.masses
# The following code does almost the same as above
# ToySnapshot = SnapshotFactory(
# name='ToySnapshot',
# features=[
# features.velocities,
# features.coordinates,
# features.engine
# ],
# description="Simulation snapshot. Only references to coordinates and "
# "velocities",
# base_class=BaseSnapshot
# )
Fix for bad merge decision | """
@author: JD Chodera
@author: JH Prinz
"""
from openpathsampling.engines import BaseSnapshot, SnapshotFactory
from openpathsampling.engines import features as feats
from . import features as toy_feats
@feats.attach_features([
toy_feats.velocities,
toy_feats.coordinates,
toy_feats.instantaneous_temperature,
toy_feats.engine
])
class ToySnapshot(BaseSnapshot):
"""
Simulation snapshot. Only references to coordinates and velocities
"""
@property
def topology(self):
return self.engine.topology
@property
def masses(self):
return self.topology.masses
# The following code does almost the same as above
# ToySnapshot = SnapshotFactory(
# name='ToySnapshot',
# features=[
# features.velocities,
# features.coordinates,
# features.engine
# ],
# description="Simulation snapshot. Only references to coordinates and "
# "velocities",
# base_class=BaseSnapshot
# )
| <commit_before>"""
@author: JD Chodera
@author: JH Prinz
"""
from openpathsampling.engines import BaseSnapshot, SnapshotFactory
import openpathsampling.engines.features as feats
from . import features as toy_feats
@feats.attach_features([
toy_feats.velocities,
toy_feats.coordinates,
toy_feats.instantaneous_temperature,
toy_feats.engine
])
class ToySnapshot(BaseSnapshot):
"""
Simulation snapshot. Only references to coordinates and velocities
"""
@property
def topology(self):
return self.engine.topology
@property
def masses(self):
return self.topology.masses
# The following code does almost the same as above
# ToySnapshot = SnapshotFactory(
# name='ToySnapshot',
# features=[
# features.velocities,
# features.coordinates,
# features.engine
# ],
# description="Simulation snapshot. Only references to coordinates and "
# "velocities",
# base_class=BaseSnapshot
# )
<commit_msg>Fix for bad merge decision<commit_after> | """
@author: JD Chodera
@author: JH Prinz
"""
from openpathsampling.engines import BaseSnapshot, SnapshotFactory
from openpathsampling.engines import features as feats
from . import features as toy_feats
@feats.attach_features([
toy_feats.velocities,
toy_feats.coordinates,
toy_feats.instantaneous_temperature,
toy_feats.engine
])
class ToySnapshot(BaseSnapshot):
"""
Simulation snapshot. Only references to coordinates and velocities
"""
@property
def topology(self):
return self.engine.topology
@property
def masses(self):
return self.topology.masses
# The following code does almost the same as above
# ToySnapshot = SnapshotFactory(
# name='ToySnapshot',
# features=[
# features.velocities,
# features.coordinates,
# features.engine
# ],
# description="Simulation snapshot. Only references to coordinates and "
# "velocities",
# base_class=BaseSnapshot
# )
| """
@author: JD Chodera
@author: JH Prinz
"""
from openpathsampling.engines import BaseSnapshot, SnapshotFactory
import openpathsampling.engines.features as feats
from . import features as toy_feats
@feats.attach_features([
toy_feats.velocities,
toy_feats.coordinates,
toy_feats.instantaneous_temperature,
toy_feats.engine
])
class ToySnapshot(BaseSnapshot):
"""
Simulation snapshot. Only references to coordinates and velocities
"""
@property
def topology(self):
return self.engine.topology
@property
def masses(self):
return self.topology.masses
# The following code does almost the same as above
# ToySnapshot = SnapshotFactory(
# name='ToySnapshot',
# features=[
# features.velocities,
# features.coordinates,
# features.engine
# ],
# description="Simulation snapshot. Only references to coordinates and "
# "velocities",
# base_class=BaseSnapshot
# )
Fix for bad merge decision"""
@author: JD Chodera
@author: JH Prinz
"""
from openpathsampling.engines import BaseSnapshot, SnapshotFactory
from openpathsampling.engines import features as feats
from . import features as toy_feats
@feats.attach_features([
toy_feats.velocities,
toy_feats.coordinates,
toy_feats.instantaneous_temperature,
toy_feats.engine
])
class ToySnapshot(BaseSnapshot):
"""
Simulation snapshot. Only references to coordinates and velocities
"""
@property
def topology(self):
return self.engine.topology
@property
def masses(self):
return self.topology.masses
# The following code does almost the same as above
# ToySnapshot = SnapshotFactory(
# name='ToySnapshot',
# features=[
# features.velocities,
# features.coordinates,
# features.engine
# ],
# description="Simulation snapshot. Only references to coordinates and "
# "velocities",
# base_class=BaseSnapshot
# )
| <commit_before>"""
@author: JD Chodera
@author: JH Prinz
"""
from openpathsampling.engines import BaseSnapshot, SnapshotFactory
import openpathsampling.engines.features as feats
from . import features as toy_feats
@feats.attach_features([
toy_feats.velocities,
toy_feats.coordinates,
toy_feats.instantaneous_temperature,
toy_feats.engine
])
class ToySnapshot(BaseSnapshot):
"""
Simulation snapshot. Only references to coordinates and velocities
"""
@property
def topology(self):
return self.engine.topology
@property
def masses(self):
return self.topology.masses
# The following code does almost the same as above
# ToySnapshot = SnapshotFactory(
# name='ToySnapshot',
# features=[
# features.velocities,
# features.coordinates,
# features.engine
# ],
# description="Simulation snapshot. Only references to coordinates and "
# "velocities",
# base_class=BaseSnapshot
# )
<commit_msg>Fix for bad merge decision<commit_after>"""
@author: JD Chodera
@author: JH Prinz
"""
from openpathsampling.engines import BaseSnapshot, SnapshotFactory
from openpathsampling.engines import features as feats
from . import features as toy_feats
@feats.attach_features([
toy_feats.velocities,
toy_feats.coordinates,
toy_feats.instantaneous_temperature,
toy_feats.engine
])
class ToySnapshot(BaseSnapshot):
"""
Simulation snapshot. Only references to coordinates and velocities
"""
@property
def topology(self):
return self.engine.topology
@property
def masses(self):
return self.topology.masses
# The following code does almost the same as above
# ToySnapshot = SnapshotFactory(
# name='ToySnapshot',
# features=[
# features.velocities,
# features.coordinates,
# features.engine
# ],
# description="Simulation snapshot. Only references to coordinates and "
# "velocities",
# base_class=BaseSnapshot
# )
|
7eb580d11dc8506cf656021d12884562d1a1b823 | dumper/site.py | dumper/site.py | from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {} should return a list of paths, not a'
'string'.format(model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
| from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {model_name} should return a list of paths, '
' not a string'.format(model_name=model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
| Use keyword based `format` to maintain 2.6 compatibility | Use keyword based `format` to maintain 2.6 compatibility
| Python | mit | saulshanabrook/django-dumper | from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {} should return a list of paths, not a'
'string'.format(model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
Use keyword based `format` to maintain 2.6 compatibility | from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {model_name} should return a list of paths, '
' not a string'.format(model_name=model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
| <commit_before>from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {} should return a list of paths, not a'
'string'.format(model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
<commit_msg>Use keyword based `format` to maintain 2.6 compatibility<commit_after> | from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {model_name} should return a list of paths, '
' not a string'.format(model_name=model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
| from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {} should return a list of paths, not a'
'string'.format(model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
Use keyword based `format` to maintain 2.6 compatibilityfrom six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {model_name} should return a list of paths, '
' not a string'.format(model_name=model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
| <commit_before>from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {} should return a list of paths, not a'
'string'.format(model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
<commit_msg>Use keyword based `format` to maintain 2.6 compatibility<commit_after>from six import string_types
from django.db.models import signals
from .invalidation import invalidate_paths
def register(model):
register_instance_function_at_save(model, invalidate_model_paths)
def register_instance_function_at_save(model, function):
def save_function(sender, instance, **kwargs):
function(instance)
signals.post_save.connect(save_function, model, weak=False)
signals.pre_delete.connect(save_function, model, weak=False)
def get_paths_from_model(model):
paths = model.dependent_paths()
if isinstance(paths, string_types):
model_name = model.__class__.__name__
raise TypeError(
('dependent_paths on {model_name} should return a list of paths, '
' not a string'.format(model_name=model_name))
)
return paths
def invalidate_model_paths(model):
paths = get_paths_from_model(model)
invalidate_paths(paths)
|
91c33bdeea9214c9594d2d3f9bd1255403d62034 | notify_levure_app_of_save.py | notify_levure_app_of_save.py | import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
| import sublime
import sublime_plugin
import re
import socket
import urllib
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
host ="localhost"
port = 62475
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #socket.SOCK_DGRAM
s.connect((host,port))
query = {'stack': stack_name, 'filename': view.file_name()}
data = urllib.parse.urlencode(query) + "\n"
s.send(data.encode())
data = s.recv(1024).decode()
s.close()
if data != 'success':
print('error updating script in LiveCode: ' + data)
else:
print('script updated in LiveCode')
| Send update notification to server | Send update notification to server
| Python | mit | trevordevore/livecode-sublimetext | import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
Send update notification to server | import sublime
import sublime_plugin
import re
import socket
import urllib
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
host ="localhost"
port = 62475
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #socket.SOCK_DGRAM
s.connect((host,port))
query = {'stack': stack_name, 'filename': view.file_name()}
data = urllib.parse.urlencode(query) + "\n"
s.send(data.encode())
data = s.recv(1024).decode()
s.close()
if data != 'success':
print('error updating script in LiveCode: ' + data)
else:
print('script updated in LiveCode')
| <commit_before>import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
<commit_msg>Send update notification to server<commit_after> | import sublime
import sublime_plugin
import re
import socket
import urllib
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
host ="localhost"
port = 62475
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #socket.SOCK_DGRAM
s.connect((host,port))
query = {'stack': stack_name, 'filename': view.file_name()}
data = urllib.parse.urlencode(query) + "\n"
s.send(data.encode())
data = s.recv(1024).decode()
s.close()
if data != 'success':
print('error updating script in LiveCode: ' + data)
else:
print('script updated in LiveCode')
| import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
Send update notification to serverimport sublime
import sublime_plugin
import re
import socket
import urllib
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
host ="localhost"
port = 62475
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #socket.SOCK_DGRAM
s.connect((host,port))
query = {'stack': stack_name, 'filename': view.file_name()}
data = urllib.parse.urlencode(query) + "\n"
s.send(data.encode())
data = s.recv(1024).decode()
s.close()
if data != 'success':
print('error updating script in LiveCode: ' + data)
else:
print('script updated in LiveCode')
| <commit_before>import sublime
import sublime_plugin
import re
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
print('stack name and filename', stack_name, view.file_name())
<commit_msg>Send update notification to server<commit_after>import sublime
import sublime_plugin
import re
import socket
import urllib
class LevureAppNotify(sublime_plugin.EventListener):
def on_post_save(self, view):
# 1. Get script only stack name. line 1: script "Name" [done]
# 2. Get project key from project settings
# 3. Send notification over socket with project key, script name, and filename
# 4. Get response from LiveCode IDE
# We are only concerned with files using Livecode syntax
if view.settings().get('syntax') == 'Packages/LiveCode/LiveCode.sublime-syntax':
stack_name = None
# Get the script only stack name
# \A matches beginning of file
region = view.find('\Ascript "([-a-zA-Z0-9_\s\?!]+)"', 0, sublime.IGNORECASE)
if region.a >= 0:
stack_name = re.search('"([-a-zA-Z0-9_\s\?!]+)"', view.substr(region)).group(1)
host ="localhost"
port = 62475
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) #socket.SOCK_DGRAM
s.connect((host,port))
query = {'stack': stack_name, 'filename': view.file_name()}
data = urllib.parse.urlencode(query) + "\n"
s.send(data.encode())
data = s.recv(1024).decode()
s.close()
if data != 'success':
print('error updating script in LiveCode: ' + data)
else:
print('script updated in LiveCode')
|
881ceeb7f814bf640caf2d7a803bfc2d350b082d | plumeria/storage/__init__.py | plumeria/storage/__init__.py | import aiomysql
from .. import config
from ..event import bus
from .migration import MigrationManager
host = config.create("storage", "host", fallback="localhost", comment="The database server host")
port = config.create("storage", "port", type=int, fallback=3306, comment="The database server port")
user = config.create("storage", "user", fallback="plumeria", comment="The database server username")
password = config.create("storage", "password", fallback="", comment="The database server password")
db = config.create("storage", "db", fallback="plumeria", comment="The database name")
class Pool:
def __init__(self):
self.pool = None
def acquire(self):
return self.pool.acquire()
pool = Pool()
migrations = MigrationManager(pool)
@bus.event('preinit')
async def preinit():
pool.pool = await aiomysql.create_pool(host=host(), port=port(), user=user(), password=password(), db=db(), autocommit=True)
await migrations.setup()
| import aiomysql
from .. import config
from ..event import bus
from .migration import MigrationManager
host = config.create("storage", "host", fallback="localhost", comment="The database server host")
port = config.create("storage", "port", type=int, fallback=3306, comment="The database server port")
user = config.create("storage", "user", fallback="plumeria", comment="The database server username")
password = config.create("storage", "password", fallback="", comment="The database server password")
db = config.create("storage", "db", fallback="plumeria", comment="The database name")
class Pool:
def __init__(self):
self.pool = None
def acquire(self):
return self.pool.acquire()
pool = Pool()
migrations = MigrationManager(pool)
@bus.event('preinit')
async def preinit():
pool.pool = await aiomysql.create_pool(host=host(), port=port(), user=user(), password=password(), db=db(),
autocommit=True, charset='utf8mb4')
await migrations.setup()
| Make sure to set MySQL charset. | Make sure to set MySQL charset.
| Python | mit | sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria | import aiomysql
from .. import config
from ..event import bus
from .migration import MigrationManager
host = config.create("storage", "host", fallback="localhost", comment="The database server host")
port = config.create("storage", "port", type=int, fallback=3306, comment="The database server port")
user = config.create("storage", "user", fallback="plumeria", comment="The database server username")
password = config.create("storage", "password", fallback="", comment="The database server password")
db = config.create("storage", "db", fallback="plumeria", comment="The database name")
class Pool:
def __init__(self):
self.pool = None
def acquire(self):
return self.pool.acquire()
pool = Pool()
migrations = MigrationManager(pool)
@bus.event('preinit')
async def preinit():
pool.pool = await aiomysql.create_pool(host=host(), port=port(), user=user(), password=password(), db=db(), autocommit=True)
await migrations.setup()
Make sure to set MySQL charset. | import aiomysql
from .. import config
from ..event import bus
from .migration import MigrationManager
host = config.create("storage", "host", fallback="localhost", comment="The database server host")
port = config.create("storage", "port", type=int, fallback=3306, comment="The database server port")
user = config.create("storage", "user", fallback="plumeria", comment="The database server username")
password = config.create("storage", "password", fallback="", comment="The database server password")
db = config.create("storage", "db", fallback="plumeria", comment="The database name")
class Pool:
def __init__(self):
self.pool = None
def acquire(self):
return self.pool.acquire()
pool = Pool()
migrations = MigrationManager(pool)
@bus.event('preinit')
async def preinit():
pool.pool = await aiomysql.create_pool(host=host(), port=port(), user=user(), password=password(), db=db(),
autocommit=True, charset='utf8mb4')
await migrations.setup()
| <commit_before>import aiomysql
from .. import config
from ..event import bus
from .migration import MigrationManager
host = config.create("storage", "host", fallback="localhost", comment="The database server host")
port = config.create("storage", "port", type=int, fallback=3306, comment="The database server port")
user = config.create("storage", "user", fallback="plumeria", comment="The database server username")
password = config.create("storage", "password", fallback="", comment="The database server password")
db = config.create("storage", "db", fallback="plumeria", comment="The database name")
class Pool:
def __init__(self):
self.pool = None
def acquire(self):
return self.pool.acquire()
pool = Pool()
migrations = MigrationManager(pool)
@bus.event('preinit')
async def preinit():
pool.pool = await aiomysql.create_pool(host=host(), port=port(), user=user(), password=password(), db=db(), autocommit=True)
await migrations.setup()
<commit_msg>Make sure to set MySQL charset.<commit_after> | import aiomysql
from .. import config
from ..event import bus
from .migration import MigrationManager
host = config.create("storage", "host", fallback="localhost", comment="The database server host")
port = config.create("storage", "port", type=int, fallback=3306, comment="The database server port")
user = config.create("storage", "user", fallback="plumeria", comment="The database server username")
password = config.create("storage", "password", fallback="", comment="The database server password")
db = config.create("storage", "db", fallback="plumeria", comment="The database name")
class Pool:
def __init__(self):
self.pool = None
def acquire(self):
return self.pool.acquire()
pool = Pool()
migrations = MigrationManager(pool)
@bus.event('preinit')
async def preinit():
pool.pool = await aiomysql.create_pool(host=host(), port=port(), user=user(), password=password(), db=db(),
autocommit=True, charset='utf8mb4')
await migrations.setup()
| import aiomysql
from .. import config
from ..event import bus
from .migration import MigrationManager
host = config.create("storage", "host", fallback="localhost", comment="The database server host")
port = config.create("storage", "port", type=int, fallback=3306, comment="The database server port")
user = config.create("storage", "user", fallback="plumeria", comment="The database server username")
password = config.create("storage", "password", fallback="", comment="The database server password")
db = config.create("storage", "db", fallback="plumeria", comment="The database name")
class Pool:
def __init__(self):
self.pool = None
def acquire(self):
return self.pool.acquire()
pool = Pool()
migrations = MigrationManager(pool)
@bus.event('preinit')
async def preinit():
pool.pool = await aiomysql.create_pool(host=host(), port=port(), user=user(), password=password(), db=db(), autocommit=True)
await migrations.setup()
Make sure to set MySQL charset.import aiomysql
from .. import config
from ..event import bus
from .migration import MigrationManager
host = config.create("storage", "host", fallback="localhost", comment="The database server host")
port = config.create("storage", "port", type=int, fallback=3306, comment="The database server port")
user = config.create("storage", "user", fallback="plumeria", comment="The database server username")
password = config.create("storage", "password", fallback="", comment="The database server password")
db = config.create("storage", "db", fallback="plumeria", comment="The database name")
class Pool:
def __init__(self):
self.pool = None
def acquire(self):
return self.pool.acquire()
pool = Pool()
migrations = MigrationManager(pool)
@bus.event('preinit')
async def preinit():
pool.pool = await aiomysql.create_pool(host=host(), port=port(), user=user(), password=password(), db=db(),
autocommit=True, charset='utf8mb4')
await migrations.setup()
| <commit_before>import aiomysql
from .. import config
from ..event import bus
from .migration import MigrationManager
host = config.create("storage", "host", fallback="localhost", comment="The database server host")
port = config.create("storage", "port", type=int, fallback=3306, comment="The database server port")
user = config.create("storage", "user", fallback="plumeria", comment="The database server username")
password = config.create("storage", "password", fallback="", comment="The database server password")
db = config.create("storage", "db", fallback="plumeria", comment="The database name")
class Pool:
def __init__(self):
self.pool = None
def acquire(self):
return self.pool.acquire()
pool = Pool()
migrations = MigrationManager(pool)
@bus.event('preinit')
async def preinit():
pool.pool = await aiomysql.create_pool(host=host(), port=port(), user=user(), password=password(), db=db(), autocommit=True)
await migrations.setup()
<commit_msg>Make sure to set MySQL charset.<commit_after>import aiomysql
from .. import config
from ..event import bus
from .migration import MigrationManager
host = config.create("storage", "host", fallback="localhost", comment="The database server host")
port = config.create("storage", "port", type=int, fallback=3306, comment="The database server port")
user = config.create("storage", "user", fallback="plumeria", comment="The database server username")
password = config.create("storage", "password", fallback="", comment="The database server password")
db = config.create("storage", "db", fallback="plumeria", comment="The database name")
class Pool:
def __init__(self):
self.pool = None
def acquire(self):
return self.pool.acquire()
pool = Pool()
migrations = MigrationManager(pool)
@bus.event('preinit')
async def preinit():
pool.pool = await aiomysql.create_pool(host=host(), port=port(), user=user(), password=password(), db=db(),
autocommit=True, charset='utf8mb4')
await migrations.setup()
|
89b9fb1cb14aeb99cb7c96717830898aead4fef1 | src/waldur_core/core/management/commands/createstaffuser.py | src/waldur_core/core/management/commands/createstaffuser.py | from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Create a user with a specified username and password. User will be created as staff."
def add_arguments(self, parser):
parser.add_argument('-u', '--username', dest='username', required=True)
parser.add_argument('-p', '--password', dest='password', required=True)
def handle(self, *args, **options):
User = get_user_model()
username = options['username']
password = options['password']
user, created = User.objects.get_or_create(
username=username, defaults=dict(last_login=timezone.now(), is_staff=True)
)
if not created:
raise CommandError('Username %s is already taken.' % username)
user.set_password(password)
user.save()
self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
| from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Create a user with a specified username and password. User will be created as staff."
def add_arguments(self, parser):
parser.add_argument('-u', '--username', dest='username', required=True)
parser.add_argument('-p', '--password', dest='password', required=True)
parser.add_argument('-e', '--email', dest='email', required=True)
def handle(self, *args, **options):
User = get_user_model()
username = options['username']
password = options['password']
email = options['email']
user, created = User.objects.get_or_create(
username=username,
email=email,
defaults=dict(last_login=timezone.now(), is_staff=True),
)
if not created:
raise CommandError('Username %s is already taken.' % username)
user.set_password(password)
user.save()
self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
| Allow setting email when creating a staff account. | Allow setting email when creating a staff account.
Otherwise makes it hard to start using HomePort as it requires email validation.
| Python | mit | opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind | from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Create a user with a specified username and password. User will be created as staff."
def add_arguments(self, parser):
parser.add_argument('-u', '--username', dest='username', required=True)
parser.add_argument('-p', '--password', dest='password', required=True)
def handle(self, *args, **options):
User = get_user_model()
username = options['username']
password = options['password']
user, created = User.objects.get_or_create(
username=username, defaults=dict(last_login=timezone.now(), is_staff=True)
)
if not created:
raise CommandError('Username %s is already taken.' % username)
user.set_password(password)
user.save()
self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
Allow setting email when creating a staff account.
Otherwise makes it hard to start using HomePort as it requires email validation. | from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Create a user with a specified username and password. User will be created as staff."
def add_arguments(self, parser):
parser.add_argument('-u', '--username', dest='username', required=True)
parser.add_argument('-p', '--password', dest='password', required=True)
parser.add_argument('-e', '--email', dest='email', required=True)
def handle(self, *args, **options):
User = get_user_model()
username = options['username']
password = options['password']
email = options['email']
user, created = User.objects.get_or_create(
username=username,
email=email,
defaults=dict(last_login=timezone.now(), is_staff=True),
)
if not created:
raise CommandError('Username %s is already taken.' % username)
user.set_password(password)
user.save()
self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
| <commit_before>from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Create a user with a specified username and password. User will be created as staff."
def add_arguments(self, parser):
parser.add_argument('-u', '--username', dest='username', required=True)
parser.add_argument('-p', '--password', dest='password', required=True)
def handle(self, *args, **options):
User = get_user_model()
username = options['username']
password = options['password']
user, created = User.objects.get_or_create(
username=username, defaults=dict(last_login=timezone.now(), is_staff=True)
)
if not created:
raise CommandError('Username %s is already taken.' % username)
user.set_password(password)
user.save()
self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
<commit_msg>Allow setting email when creating a staff account.
Otherwise makes it hard to start using HomePort as it requires email validation.<commit_after> | from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Create a user with a specified username and password. User will be created as staff."
def add_arguments(self, parser):
parser.add_argument('-u', '--username', dest='username', required=True)
parser.add_argument('-p', '--password', dest='password', required=True)
parser.add_argument('-e', '--email', dest='email', required=True)
def handle(self, *args, **options):
User = get_user_model()
username = options['username']
password = options['password']
email = options['email']
user, created = User.objects.get_or_create(
username=username,
email=email,
defaults=dict(last_login=timezone.now(), is_staff=True),
)
if not created:
raise CommandError('Username %s is already taken.' % username)
user.set_password(password)
user.save()
self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
| from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Create a user with a specified username and password. User will be created as staff."
def add_arguments(self, parser):
parser.add_argument('-u', '--username', dest='username', required=True)
parser.add_argument('-p', '--password', dest='password', required=True)
def handle(self, *args, **options):
User = get_user_model()
username = options['username']
password = options['password']
user, created = User.objects.get_or_create(
username=username, defaults=dict(last_login=timezone.now(), is_staff=True)
)
if not created:
raise CommandError('Username %s is already taken.' % username)
user.set_password(password)
user.save()
self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
Allow setting email when creating a staff account.
Otherwise makes it hard to start using HomePort as it requires email validation.from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Create a user with a specified username and password. User will be created as staff."
def add_arguments(self, parser):
parser.add_argument('-u', '--username', dest='username', required=True)
parser.add_argument('-p', '--password', dest='password', required=True)
parser.add_argument('-e', '--email', dest='email', required=True)
def handle(self, *args, **options):
User = get_user_model()
username = options['username']
password = options['password']
email = options['email']
user, created = User.objects.get_or_create(
username=username,
email=email,
defaults=dict(last_login=timezone.now(), is_staff=True),
)
if not created:
raise CommandError('Username %s is already taken.' % username)
user.set_password(password)
user.save()
self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
| <commit_before>from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Create a user with a specified username and password. User will be created as staff."
def add_arguments(self, parser):
parser.add_argument('-u', '--username', dest='username', required=True)
parser.add_argument('-p', '--password', dest='password', required=True)
def handle(self, *args, **options):
User = get_user_model()
username = options['username']
password = options['password']
user, created = User.objects.get_or_create(
username=username, defaults=dict(last_login=timezone.now(), is_staff=True)
)
if not created:
raise CommandError('Username %s is already taken.' % username)
user.set_password(password)
user.save()
self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
<commit_msg>Allow setting email when creating a staff account.
Otherwise makes it hard to start using HomePort as it requires email validation.<commit_after>from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
class Command(BaseCommand):
help = "Create a user with a specified username and password. User will be created as staff."
def add_arguments(self, parser):
parser.add_argument('-u', '--username', dest='username', required=True)
parser.add_argument('-p', '--password', dest='password', required=True)
parser.add_argument('-e', '--email', dest='email', required=True)
def handle(self, *args, **options):
User = get_user_model()
username = options['username']
password = options['password']
email = options['email']
user, created = User.objects.get_or_create(
username=username,
email=email,
defaults=dict(last_login=timezone.now(), is_staff=True),
)
if not created:
raise CommandError('Username %s is already taken.' % username)
user.set_password(password)
user.save()
self.stdout.write(self.style.SUCCESS('User %s has been created.' % username))
|
5c9e9d33113c7fcf49223853abf52f1e91b17687 | frappe/integrations/doctype/google_maps_settings/google_maps_settings.py | frappe/integrations/doctype/google_maps_settings/google_maps_settings.py | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
import googlemaps
import datetime
class GoogleMapsSettings(Document):
def validate(self):
if self.enabled:
if not self.client_key:
frappe.throw(_("Client key is required"))
if not self.home_address:
frappe.throw(_("Home Address is required"))
def get_client(self):
try:
client = googlemaps.Client(key=self.client_key)
except Exception as e:
frappe.throw(e.message)
return client
| # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import googlemaps
import frappe
from frappe import _
from frappe.model.document import Document
class GoogleMapsSettings(Document):
def validate(self):
if self.enabled:
if not self.client_key:
frappe.throw(_("Client key is required"))
if not self.home_address:
frappe.throw(_("Home Address is required"))
def get_client(self):
if not self.enabled:
frappe.throw(_("Google Maps integration is not enabled"))
try:
client = googlemaps.Client(key=self.client_key)
except Exception as e:
frappe.throw(e.message)
return client
| Check if Google Maps is enabled when trying to get the client | Check if Google Maps is enabled when trying to get the client
| Python | mit | adityahase/frappe,adityahase/frappe,ESS-LLP/frappe,almeidapaulopt/frappe,StrellaGroup/frappe,yashodhank/frappe,RicardoJohann/frappe,yashodhank/frappe,ESS-LLP/frappe,frappe/frappe,mhbu50/frappe,saurabh6790/frappe,saurabh6790/frappe,adityahase/frappe,vjFaLk/frappe,yashodhank/frappe,mhbu50/frappe,vjFaLk/frappe,almeidapaulopt/frappe,frappe/frappe,vjFaLk/frappe,StrellaGroup/frappe,ESS-LLP/frappe,yashodhank/frappe,RicardoJohann/frappe,mhbu50/frappe,mhbu50/frappe,RicardoJohann/frappe,RicardoJohann/frappe,saurabh6790/frappe,StrellaGroup/frappe,frappe/frappe,ESS-LLP/frappe,saurabh6790/frappe,adityahase/frappe,vjFaLk/frappe,almeidapaulopt/frappe,almeidapaulopt/frappe | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
import googlemaps
import datetime
class GoogleMapsSettings(Document):
def validate(self):
if self.enabled:
if not self.client_key:
frappe.throw(_("Client key is required"))
if not self.home_address:
frappe.throw(_("Home Address is required"))
def get_client(self):
try:
client = googlemaps.Client(key=self.client_key)
except Exception as e:
frappe.throw(e.message)
return client
Check if Google Maps is enabled when trying to get the client | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import googlemaps
import frappe
from frappe import _
from frappe.model.document import Document
class GoogleMapsSettings(Document):
def validate(self):
if self.enabled:
if not self.client_key:
frappe.throw(_("Client key is required"))
if not self.home_address:
frappe.throw(_("Home Address is required"))
def get_client(self):
if not self.enabled:
frappe.throw(_("Google Maps integration is not enabled"))
try:
client = googlemaps.Client(key=self.client_key)
except Exception as e:
frappe.throw(e.message)
return client
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
import googlemaps
import datetime
class GoogleMapsSettings(Document):
def validate(self):
if self.enabled:
if not self.client_key:
frappe.throw(_("Client key is required"))
if not self.home_address:
frappe.throw(_("Home Address is required"))
def get_client(self):
try:
client = googlemaps.Client(key=self.client_key)
except Exception as e:
frappe.throw(e.message)
return client
<commit_msg>Check if Google Maps is enabled when trying to get the client<commit_after> | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import googlemaps
import frappe
from frappe import _
from frappe.model.document import Document
class GoogleMapsSettings(Document):
def validate(self):
if self.enabled:
if not self.client_key:
frappe.throw(_("Client key is required"))
if not self.home_address:
frappe.throw(_("Home Address is required"))
def get_client(self):
if not self.enabled:
frappe.throw(_("Google Maps integration is not enabled"))
try:
client = googlemaps.Client(key=self.client_key)
except Exception as e:
frappe.throw(e.message)
return client
| # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
import googlemaps
import datetime
class GoogleMapsSettings(Document):
def validate(self):
if self.enabled:
if not self.client_key:
frappe.throw(_("Client key is required"))
if not self.home_address:
frappe.throw(_("Home Address is required"))
def get_client(self):
try:
client = googlemaps.Client(key=self.client_key)
except Exception as e:
frappe.throw(e.message)
return client
Check if Google Maps is enabled when trying to get the client# -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import googlemaps
import frappe
from frappe import _
from frappe.model.document import Document
class GoogleMapsSettings(Document):
def validate(self):
if self.enabled:
if not self.client_key:
frappe.throw(_("Client key is required"))
if not self.home_address:
frappe.throw(_("Home Address is required"))
def get_client(self):
if not self.enabled:
frappe.throw(_("Google Maps integration is not enabled"))
try:
client = googlemaps.Client(key=self.client_key)
except Exception as e:
frappe.throw(e.message)
return client
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
import googlemaps
import datetime
class GoogleMapsSettings(Document):
def validate(self):
if self.enabled:
if not self.client_key:
frappe.throw(_("Client key is required"))
if not self.home_address:
frappe.throw(_("Home Address is required"))
def get_client(self):
try:
client = googlemaps.Client(key=self.client_key)
except Exception as e:
frappe.throw(e.message)
return client
<commit_msg>Check if Google Maps is enabled when trying to get the client<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import googlemaps
import frappe
from frappe import _
from frappe.model.document import Document
class GoogleMapsSettings(Document):
def validate(self):
if self.enabled:
if not self.client_key:
frappe.throw(_("Client key is required"))
if not self.home_address:
frappe.throw(_("Home Address is required"))
def get_client(self):
if not self.enabled:
frappe.throw(_("Google Maps integration is not enabled"))
try:
client = googlemaps.Client(key=self.client_key)
except Exception as e:
frappe.throw(e.message)
return client
|
cfc4b9d10d43da3c68503d544d96a0ea8bb5d543 | bpython/__init__.py | bpython/__init__.py | # The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
| # The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.11'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
| Bump version number and tag release | Bump version number and tag release
| Python | mit | 5monkeys/bpython | # The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
Bump version number and tag release | # The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.11'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
| <commit_before># The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
<commit_msg>Bump version number and tag release<commit_after> | # The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.11'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
| # The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
Bump version number and tag release# The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.11'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
| <commit_before># The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = 'mercurial'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
<commit_msg>Bump version number and tag release<commit_after># The MIT License
#
# Copyright (c) 2008 Bob Farrell
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os.path
__version__ = '0.11'
package_dir = os.path.abspath(os.path.dirname(__file__))
def embed(locals_=None, args=['-i', '-q'], banner=None):
from bpython.cli import main
return main(args, locals_, banner)
|
7f0121b4ade7a14f47cbf3d1573134dffaaf86ee | src/nodeconductor_assembly_waldur/slurm_invoices/apps.py | src/nodeconductor_assembly_waldur/slurm_invoices/apps.py | from django.apps import AppConfig
from django.db.models import signals
class SlurmInvoicesConfig(AppConfig):
name = 'nodeconductor_assembly_waldur.slurm_invoices'
verbose_name = 'SLURM invoices'
def ready(self):
from nodeconductor_assembly_waldur.invoices import registrators
from waldur_slurm import models as slurm_models
from . import handlers, registrators as slurm_registrators
registrators.RegistrationManager.add_registrator(
slurm_models.Allocation,
slurm_registrators.AllocationRegistrator
)
signals.post_save.connect(
handlers.add_new_allocation_to_invoice,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.add_new_allocation_to_invoice',
)
signals.post_save.connect(
handlers.terminate_invoice_when_allocation_cancelled,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_cancelled',
)
signals.pre_delete.connect(
handlers.terminate_invoice_when_allocation_deleted,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_deleted',
)
signals.post_save.connect(
handlers.update_invoice_item_on_allocation_usage_update,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_invoice_item_on_allocation_usage_update',
)
| from django.apps import AppConfig
from django.db.models import signals
class SlurmInvoicesConfig(AppConfig):
name = 'nodeconductor_assembly_waldur.slurm_invoices'
verbose_name = 'Batch packages'
def ready(self):
from nodeconductor_assembly_waldur.invoices import registrators
from waldur_slurm import models as slurm_models
from . import handlers, registrators as slurm_registrators
registrators.RegistrationManager.add_registrator(
slurm_models.Allocation,
slurm_registrators.AllocationRegistrator
)
signals.post_save.connect(
handlers.add_new_allocation_to_invoice,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.add_new_allocation_to_invoice',
)
signals.post_save.connect(
handlers.terminate_invoice_when_allocation_cancelled,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_cancelled',
)
signals.pre_delete.connect(
handlers.terminate_invoice_when_allocation_deleted,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_deleted',
)
signals.post_save.connect(
handlers.update_invoice_item_on_allocation_usage_update,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_invoice_item_on_allocation_usage_update',
)
| Rename SLURM invoices application for better Django dashoard menu item. | Rename SLURM invoices application for better Django dashoard menu item.
| Python | mit | opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind | from django.apps import AppConfig
from django.db.models import signals
class SlurmInvoicesConfig(AppConfig):
name = 'nodeconductor_assembly_waldur.slurm_invoices'
verbose_name = 'SLURM invoices'
def ready(self):
from nodeconductor_assembly_waldur.invoices import registrators
from waldur_slurm import models as slurm_models
from . import handlers, registrators as slurm_registrators
registrators.RegistrationManager.add_registrator(
slurm_models.Allocation,
slurm_registrators.AllocationRegistrator
)
signals.post_save.connect(
handlers.add_new_allocation_to_invoice,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.add_new_allocation_to_invoice',
)
signals.post_save.connect(
handlers.terminate_invoice_when_allocation_cancelled,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_cancelled',
)
signals.pre_delete.connect(
handlers.terminate_invoice_when_allocation_deleted,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_deleted',
)
signals.post_save.connect(
handlers.update_invoice_item_on_allocation_usage_update,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_invoice_item_on_allocation_usage_update',
)
Rename SLURM invoices application for better Django dashoard menu item. | from django.apps import AppConfig
from django.db.models import signals
class SlurmInvoicesConfig(AppConfig):
name = 'nodeconductor_assembly_waldur.slurm_invoices'
verbose_name = 'Batch packages'
def ready(self):
from nodeconductor_assembly_waldur.invoices import registrators
from waldur_slurm import models as slurm_models
from . import handlers, registrators as slurm_registrators
registrators.RegistrationManager.add_registrator(
slurm_models.Allocation,
slurm_registrators.AllocationRegistrator
)
signals.post_save.connect(
handlers.add_new_allocation_to_invoice,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.add_new_allocation_to_invoice',
)
signals.post_save.connect(
handlers.terminate_invoice_when_allocation_cancelled,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_cancelled',
)
signals.pre_delete.connect(
handlers.terminate_invoice_when_allocation_deleted,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_deleted',
)
signals.post_save.connect(
handlers.update_invoice_item_on_allocation_usage_update,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_invoice_item_on_allocation_usage_update',
)
| <commit_before>from django.apps import AppConfig
from django.db.models import signals
class SlurmInvoicesConfig(AppConfig):
name = 'nodeconductor_assembly_waldur.slurm_invoices'
verbose_name = 'SLURM invoices'
def ready(self):
from nodeconductor_assembly_waldur.invoices import registrators
from waldur_slurm import models as slurm_models
from . import handlers, registrators as slurm_registrators
registrators.RegistrationManager.add_registrator(
slurm_models.Allocation,
slurm_registrators.AllocationRegistrator
)
signals.post_save.connect(
handlers.add_new_allocation_to_invoice,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.add_new_allocation_to_invoice',
)
signals.post_save.connect(
handlers.terminate_invoice_when_allocation_cancelled,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_cancelled',
)
signals.pre_delete.connect(
handlers.terminate_invoice_when_allocation_deleted,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_deleted',
)
signals.post_save.connect(
handlers.update_invoice_item_on_allocation_usage_update,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_invoice_item_on_allocation_usage_update',
)
<commit_msg>Rename SLURM invoices application for better Django dashoard menu item.<commit_after> | from django.apps import AppConfig
from django.db.models import signals
class SlurmInvoicesConfig(AppConfig):
name = 'nodeconductor_assembly_waldur.slurm_invoices'
verbose_name = 'Batch packages'
def ready(self):
from nodeconductor_assembly_waldur.invoices import registrators
from waldur_slurm import models as slurm_models
from . import handlers, registrators as slurm_registrators
registrators.RegistrationManager.add_registrator(
slurm_models.Allocation,
slurm_registrators.AllocationRegistrator
)
signals.post_save.connect(
handlers.add_new_allocation_to_invoice,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.add_new_allocation_to_invoice',
)
signals.post_save.connect(
handlers.terminate_invoice_when_allocation_cancelled,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_cancelled',
)
signals.pre_delete.connect(
handlers.terminate_invoice_when_allocation_deleted,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_deleted',
)
signals.post_save.connect(
handlers.update_invoice_item_on_allocation_usage_update,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_invoice_item_on_allocation_usage_update',
)
| from django.apps import AppConfig
from django.db.models import signals
class SlurmInvoicesConfig(AppConfig):
name = 'nodeconductor_assembly_waldur.slurm_invoices'
verbose_name = 'SLURM invoices'
def ready(self):
from nodeconductor_assembly_waldur.invoices import registrators
from waldur_slurm import models as slurm_models
from . import handlers, registrators as slurm_registrators
registrators.RegistrationManager.add_registrator(
slurm_models.Allocation,
slurm_registrators.AllocationRegistrator
)
signals.post_save.connect(
handlers.add_new_allocation_to_invoice,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.add_new_allocation_to_invoice',
)
signals.post_save.connect(
handlers.terminate_invoice_when_allocation_cancelled,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_cancelled',
)
signals.pre_delete.connect(
handlers.terminate_invoice_when_allocation_deleted,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_deleted',
)
signals.post_save.connect(
handlers.update_invoice_item_on_allocation_usage_update,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_invoice_item_on_allocation_usage_update',
)
Rename SLURM invoices application for better Django dashoard menu item.from django.apps import AppConfig
from django.db.models import signals
class SlurmInvoicesConfig(AppConfig):
name = 'nodeconductor_assembly_waldur.slurm_invoices'
verbose_name = 'Batch packages'
def ready(self):
from nodeconductor_assembly_waldur.invoices import registrators
from waldur_slurm import models as slurm_models
from . import handlers, registrators as slurm_registrators
registrators.RegistrationManager.add_registrator(
slurm_models.Allocation,
slurm_registrators.AllocationRegistrator
)
signals.post_save.connect(
handlers.add_new_allocation_to_invoice,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.add_new_allocation_to_invoice',
)
signals.post_save.connect(
handlers.terminate_invoice_when_allocation_cancelled,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_cancelled',
)
signals.pre_delete.connect(
handlers.terminate_invoice_when_allocation_deleted,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_deleted',
)
signals.post_save.connect(
handlers.update_invoice_item_on_allocation_usage_update,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_invoice_item_on_allocation_usage_update',
)
| <commit_before>from django.apps import AppConfig
from django.db.models import signals
class SlurmInvoicesConfig(AppConfig):
name = 'nodeconductor_assembly_waldur.slurm_invoices'
verbose_name = 'SLURM invoices'
def ready(self):
from nodeconductor_assembly_waldur.invoices import registrators
from waldur_slurm import models as slurm_models
from . import handlers, registrators as slurm_registrators
registrators.RegistrationManager.add_registrator(
slurm_models.Allocation,
slurm_registrators.AllocationRegistrator
)
signals.post_save.connect(
handlers.add_new_allocation_to_invoice,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.add_new_allocation_to_invoice',
)
signals.post_save.connect(
handlers.terminate_invoice_when_allocation_cancelled,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_cancelled',
)
signals.pre_delete.connect(
handlers.terminate_invoice_when_allocation_deleted,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_deleted',
)
signals.post_save.connect(
handlers.update_invoice_item_on_allocation_usage_update,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_invoice_item_on_allocation_usage_update',
)
<commit_msg>Rename SLURM invoices application for better Django dashoard menu item.<commit_after>from django.apps import AppConfig
from django.db.models import signals
class SlurmInvoicesConfig(AppConfig):
name = 'nodeconductor_assembly_waldur.slurm_invoices'
verbose_name = 'Batch packages'
def ready(self):
from nodeconductor_assembly_waldur.invoices import registrators
from waldur_slurm import models as slurm_models
from . import handlers, registrators as slurm_registrators
registrators.RegistrationManager.add_registrator(
slurm_models.Allocation,
slurm_registrators.AllocationRegistrator
)
signals.post_save.connect(
handlers.add_new_allocation_to_invoice,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.add_new_allocation_to_invoice',
)
signals.post_save.connect(
handlers.terminate_invoice_when_allocation_cancelled,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_cancelled',
)
signals.pre_delete.connect(
handlers.terminate_invoice_when_allocation_deleted,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.terminate_invoice_when_allocation_deleted',
)
signals.post_save.connect(
handlers.update_invoice_item_on_allocation_usage_update,
sender=slurm_models.Allocation,
dispatch_uid='waldur_slurm.handlers.update_invoice_item_on_allocation_usage_update',
)
|
ecd7f5f46146fa9378000ac469f6eca8f64ac31d | stoq/tests/data/plugins/archiver/dummy_archiver/dummy_archiver.py | stoq/tests/data/plugins/archiver/dummy_archiver/dummy_archiver.py | #!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import ArchiverPlugin
class DummyArchiver(ArchiverPlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
return None
def get(self, task: str) -> Optional[Payload]:
return None
| #!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from stoq.plugins import ArchiverPlugin
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
class DummyArchiver(ArchiverPlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
return None
def get(self, task: ArchiverResponse) -> Optional[Payload]:
return None
| Fix test signature value type for task | Fix test signature value type for task
| Python | apache-2.0 | PUNCH-Cyber/stoq | #!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import ArchiverPlugin
class DummyArchiver(ArchiverPlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
return None
def get(self, task: str) -> Optional[Payload]:
return None
Fix test signature value type for task | #!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from stoq.plugins import ArchiverPlugin
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
class DummyArchiver(ArchiverPlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
return None
def get(self, task: ArchiverResponse) -> Optional[Payload]:
return None
| <commit_before>#!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import ArchiverPlugin
class DummyArchiver(ArchiverPlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
return None
def get(self, task: str) -> Optional[Payload]:
return None
<commit_msg>Fix test signature value type for task<commit_after> | #!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from stoq.plugins import ArchiverPlugin
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
class DummyArchiver(ArchiverPlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
return None
def get(self, task: ArchiverResponse) -> Optional[Payload]:
return None
| #!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import ArchiverPlugin
class DummyArchiver(ArchiverPlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
return None
def get(self, task: str) -> Optional[Payload]:
return None
Fix test signature value type for task#!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from stoq.plugins import ArchiverPlugin
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
class DummyArchiver(ArchiverPlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
return None
def get(self, task: ArchiverResponse) -> Optional[Payload]:
return None
| <commit_before>#!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
from stoq.plugins import ArchiverPlugin
class DummyArchiver(ArchiverPlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
return None
def get(self, task: str) -> Optional[Payload]:
return None
<commit_msg>Fix test signature value type for task<commit_after>#!/usr/bin/env python3
# Copyright 2014-2018 PUNCH Cyber Analytics Group
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
from stoq.plugins import ArchiverPlugin
from stoq.data_classes import ArchiverResponse, Payload, RequestMeta
class DummyArchiver(ArchiverPlugin):
def archive(
self, payload: Payload, request_meta: RequestMeta
) -> Optional[ArchiverResponse]:
return None
def get(self, task: ArchiverResponse) -> Optional[Payload]:
return None
|
a7337c249fef106c74b5d83684311b1be7657169 | website/settings/local-travis.py | website/settings/local-travis.py | # -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
from . import defaults
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# Default RabbitMQ backend
CELERY_RESULT_BACKEND = 'amqp://'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
| # -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
from . import defaults
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# Default RabbitMQ backend
CELERY_RESULT_BACKEND = 'amqp://'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
ENABLE_VARNISH = True
| Add ENABLE_VARNISH setting to travis settings | Add ENABLE_VARNISH setting to travis settings
| Python | apache-2.0 | brandonPurvis/osf.io,DanielSBrown/osf.io,jnayak1/osf.io,Johnetordoff/osf.io,sloria/osf.io,asanfilippo7/osf.io,binoculars/osf.io,samchrisinger/osf.io,HalcyonChimera/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,emetsger/osf.io,adlius/osf.io,alexschiller/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,doublebits/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,zamattiac/osf.io,icereval/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,chrisseto/osf.io,kch8qx/osf.io,aaxelb/osf.io,samchrisinger/osf.io,laurenrevere/osf.io,mluke93/osf.io,pattisdr/osf.io,rdhyee/osf.io,cwisecarver/osf.io,leb2dg/osf.io,mluo613/osf.io,mluke93/osf.io,emetsger/osf.io,RomanZWang/osf.io,aaxelb/osf.io,alexschiller/osf.io,KAsante95/osf.io,laurenrevere/osf.io,rdhyee/osf.io,pattisdr/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,wearpants/osf.io,felliott/osf.io,alexschiller/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,kwierman/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,adlius/osf.io,asanfilippo7/osf.io,doublebits/osf.io,cslzchen/osf.io,baylee-d/osf.io,rdhyee/osf.io,SSJohns/osf.io,erinspace/osf.io,binoculars/osf.io,cslzchen/osf.io,baylee-d/osf.io,acshi/osf.io,abought/osf.io,acshi/osf.io,kwierman/osf.io,doublebits/osf.io,mluo613/osf.io,emetsger/osf.io,CenterForOpenScience/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,hmoco/osf.io,GageGaskins/osf.io,icereval/osf.io,rdhyee/osf.io,jnayak1/osf.io,amyshi188/osf.io,KAsante95/osf.io,caseyrollins/osf.io,acshi/osf.io,Johnetordoff/osf.io,amyshi188/osf.io,Johnetordoff/osf.io,billyhunt/osf.io,crcresearch/osf.io,asanfilippo7/osf.io,amyshi188/osf.io,caneruguz/osf.io,zamattiac/osf.io,brandonPurvis/osf.io,KAsante95/osf.io,erinspace/osf.io,KAsante95/osf.io,mluo613/osf.io,Nesiehr/osf.io,abought/osf.io,chennan47/osf.io,mattclark/osf.io,GageGaskins/osf.io,cwisecarver/osf.io,sloria/osf.io,chrisseto/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,cwisecarver/osf.io,zachjanicki/osf.io,zamattiac/o
sf.io,kch8qx/osf.io,mluo613/osf.io,crcresearch/osf.io,adlius/osf.io,pattisdr/osf.io,aaxelb/osf.io,RomanZWang/osf.io,mattclark/osf.io,amyshi188/osf.io,abought/osf.io,icereval/osf.io,cslzchen/osf.io,KAsante95/osf.io,caseyrollins/osf.io,billyhunt/osf.io,leb2dg/osf.io,laurenrevere/osf.io,saradbowman/osf.io,kwierman/osf.io,adlius/osf.io,TomHeatwole/osf.io,doublebits/osf.io,billyhunt/osf.io,SSJohns/osf.io,mluke93/osf.io,wearpants/osf.io,saradbowman/osf.io,acshi/osf.io,TomHeatwole/osf.io,Nesiehr/osf.io,hmoco/osf.io,emetsger/osf.io,Nesiehr/osf.io,erinspace/osf.io,mfraezz/osf.io,GageGaskins/osf.io,mluke93/osf.io,DanielSBrown/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,asanfilippo7/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,sloria/osf.io,jnayak1/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,mattclark/osf.io,wearpants/osf.io,brandonPurvis/osf.io,zamattiac/osf.io,brandonPurvis/osf.io,TomBaxter/osf.io,TomBaxter/osf.io,alexschiller/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,felliott/osf.io,hmoco/osf.io,samchrisinger/osf.io,crcresearch/osf.io,SSJohns/osf.io,cwisecarver/osf.io,kch8qx/osf.io,samchrisinger/osf.io,felliott/osf.io,leb2dg/osf.io,chrisseto/osf.io,doublebits/osf.io,leb2dg/osf.io,TomBaxter/osf.io,billyhunt/osf.io,cslzchen/osf.io,billyhunt/osf.io,Johnetordoff/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,felliott/osf.io,caneruguz/osf.io,RomanZWang/osf.io,aaxelb/osf.io,binoculars/osf.io,TomHeatwole/osf.io,mfraezz/osf.io,mfraezz/osf.io,jnayak1/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,abought/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,chennan47/osf.io | # -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
from . import defaults
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# Default RabbitMQ backend
CELERY_RESULT_BACKEND = 'amqp://'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
Add ENABLE_VARNISH setting to travis settings | # -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
from . import defaults
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# Default RabbitMQ backend
CELERY_RESULT_BACKEND = 'amqp://'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
ENABLE_VARNISH = True
| <commit_before># -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
from . import defaults
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# Default RabbitMQ backend
CELERY_RESULT_BACKEND = 'amqp://'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
<commit_msg>Add ENABLE_VARNISH setting to travis settings<commit_after> | # -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
from . import defaults
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# Default RabbitMQ backend
CELERY_RESULT_BACKEND = 'amqp://'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
ENABLE_VARNISH = True
| # -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
from . import defaults
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# Default RabbitMQ backend
CELERY_RESULT_BACKEND = 'amqp://'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
Add ENABLE_VARNISH setting to travis settings# -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
from . import defaults
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# Default RabbitMQ backend
CELERY_RESULT_BACKEND = 'amqp://'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
ENABLE_VARNISH = True
| <commit_before># -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
from . import defaults
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# Default RabbitMQ backend
CELERY_RESULT_BACKEND = 'amqp://'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
<commit_msg>Add ENABLE_VARNISH setting to travis settings<commit_after># -*- coding: utf-8 -*-
'''Example settings/local.py file.
These settings override what's in website/settings/defaults.py
NOTE: local.py will not be added to source control.
'''
from . import defaults
DB_PORT = 27017
DEV_MODE = True
DEBUG_MODE = True # Sets app to debug mode, turns off template caching, etc.
SEARCH_ENGINE = 'elastic'
USE_EMAIL = False
USE_CELERY = False
USE_GNUPG = False
# Email
MAIL_SERVER = 'localhost:1025' # For local testing
MAIL_USERNAME = 'osf-smtp'
MAIL_PASSWORD = 'CHANGEME'
# Session
COOKIE_NAME = 'osf'
SECRET_KEY = "CHANGEME"
##### Celery #####
## Default RabbitMQ broker
BROKER_URL = 'amqp://'
# Default RabbitMQ backend
CELERY_RESULT_BACKEND = 'amqp://'
USE_CDN_FOR_CLIENT_LIBS = False
SENTRY_DSN = None
TEST_DB_NAME = DB_NAME = 'osf_test'
VARNISH_SERVERS = ['http://localhost:8080']
ENABLE_VARNISH = True
|
533bb1a3e3d845a84c3f897cb490df02fb11a71f | stock_cancel/__openerp__.py | stock_cancel/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Andrea Cometa All Rights Reserved.
# www.andreacometa.it
# openerp@andreacometa.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Cancel',
'version': '1.2',
'category': 'Stock',
'description': """This module allows you to bring back a completed stock
picking to draft state""",
'author': 'www.andreacometa.it',
'website': 'http://www.andreacometa.it',
'depends': ['stock_picking_invoice_link'],
'data': [
'stock_view.xml',
],
'installable': True,
'images': ['images/stock_picking.jpg'],
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Andrea Cometa All Rights Reserved.
# www.andreacometa.it
# openerp@andreacometa.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Cancel',
'version': '1.2',
'category': 'Stock',
'description': """This module allows you to bring back a completed stock
picking to draft state""",
'author': 'www.andreacometa.it',
'website': 'http://www.andreacometa.it',
'depends': ['stock_picking_invoice_link'],
'data': [
'stock_view.xml',
],
'installable': False,
'images': ['images/stock_picking.jpg'],
}
| Set as not installable because it's broken | [FIX] stock_cancel: Set as not installable because it's broken
| Python | agpl-3.0 | archetipo/stock-logistics-workflow,damdam-s/stock-logistics-workflow,grap/stock-logistics-workflow,Endika/stock-logistics-workflow,raycarnes/stock-logistics-workflow,xpansa/stock-logistics-workflow,Antiun/stock-logistics-workflow,acsone/stock-logistics-workflow,open-synergy/stock-logistics-workflow,brain-tec/stock-logistics-workflow,pedrobaeza/stock-logistics-workflow,Eficent/stock-logistics-workflow,Eficent/stock-logistics-workflow,OpenCode/stock-logistics-workflow,vrenaville/stock-logistics-workflow,gurneyalex/stock-logistics-workflow,gurneyalex/stock-logistics-workflow,akretion/stock-logistics-workflow,BT-jmichaud/stock-logistics-workflow,hurrinico/stock-logistics-workflow,BT-fgarbely/stock-logistics-workflow,oihane/stock-logistics-workflow,akretion/stock-logistics-workflow,open-synergy/stock-logistics-workflow,xpansa/stock-logistics-workflow,brain-tec/stock-logistics-workflow,acsone/stock-logistics-workflow | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Andrea Cometa All Rights Reserved.
# www.andreacometa.it
# openerp@andreacometa.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Cancel',
'version': '1.2',
'category': 'Stock',
'description': """This module allows you to bring back a completed stock
picking to draft state""",
'author': 'www.andreacometa.it',
'website': 'http://www.andreacometa.it',
'depends': ['stock_picking_invoice_link'],
'data': [
'stock_view.xml',
],
'installable': True,
'images': ['images/stock_picking.jpg'],
}
[FIX] stock_cancel: Set as not installable because it's broken | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Andrea Cometa All Rights Reserved.
# www.andreacometa.it
# openerp@andreacometa.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Cancel',
'version': '1.2',
'category': 'Stock',
'description': """This module allows you to bring back a completed stock
picking to draft state""",
'author': 'www.andreacometa.it',
'website': 'http://www.andreacometa.it',
'depends': ['stock_picking_invoice_link'],
'data': [
'stock_view.xml',
],
'installable': False,
'images': ['images/stock_picking.jpg'],
}
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Andrea Cometa All Rights Reserved.
# www.andreacometa.it
# openerp@andreacometa.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Cancel',
'version': '1.2',
'category': 'Stock',
'description': """This module allows you to bring back a completed stock
picking to draft state""",
'author': 'www.andreacometa.it',
'website': 'http://www.andreacometa.it',
'depends': ['stock_picking_invoice_link'],
'data': [
'stock_view.xml',
],
'installable': True,
'images': ['images/stock_picking.jpg'],
}
<commit_msg>[FIX] stock_cancel: Set as not installable because it's broken<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Andrea Cometa All Rights Reserved.
# www.andreacometa.it
# openerp@andreacometa.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Cancel',
'version': '1.2',
'category': 'Stock',
'description': """This module allows you to bring back a completed stock
picking to draft state""",
'author': 'www.andreacometa.it',
'website': 'http://www.andreacometa.it',
'depends': ['stock_picking_invoice_link'],
'data': [
'stock_view.xml',
],
'installable': False,
'images': ['images/stock_picking.jpg'],
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Andrea Cometa All Rights Reserved.
# www.andreacometa.it
# openerp@andreacometa.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Cancel',
'version': '1.2',
'category': 'Stock',
'description': """This module allows you to bring back a completed stock
picking to draft state""",
'author': 'www.andreacometa.it',
'website': 'http://www.andreacometa.it',
'depends': ['stock_picking_invoice_link'],
'data': [
'stock_view.xml',
],
'installable': True,
'images': ['images/stock_picking.jpg'],
}
[FIX] stock_cancel: Set as not installable because it's broken# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Andrea Cometa All Rights Reserved.
# www.andreacometa.it
# openerp@andreacometa.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Cancel',
'version': '1.2',
'category': 'Stock',
'description': """This module allows you to bring back a completed stock
picking to draft state""",
'author': 'www.andreacometa.it',
'website': 'http://www.andreacometa.it',
'depends': ['stock_picking_invoice_link'],
'data': [
'stock_view.xml',
],
'installable': False,
'images': ['images/stock_picking.jpg'],
}
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Andrea Cometa All Rights Reserved.
# www.andreacometa.it
# openerp@andreacometa.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Cancel',
'version': '1.2',
'category': 'Stock',
'description': """This module allows you to bring back a completed stock
picking to draft state""",
'author': 'www.andreacometa.it',
'website': 'http://www.andreacometa.it',
'depends': ['stock_picking_invoice_link'],
'data': [
'stock_view.xml',
],
'installable': True,
'images': ['images/stock_picking.jpg'],
}
<commit_msg>[FIX] stock_cancel: Set as not installable because it's broken<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (c) 2014 Andrea Cometa All Rights Reserved.
# www.andreacometa.it
# openerp@andreacometa.it
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Stock Cancel',
'version': '1.2',
'category': 'Stock',
'description': """This module allows you to bring back a completed stock
picking to draft state""",
'author': 'www.andreacometa.it',
'website': 'http://www.andreacometa.it',
'depends': ['stock_picking_invoice_link'],
'data': [
'stock_view.xml',
],
'installable': False,
'images': ['images/stock_picking.jpg'],
}
|
aadcb7f700391d1e1b8a6442198a9a2131e6f407 | asyncio_irc/connection.py | asyncio_irc/connection.py | import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
message = yield from self.reader.readline()
if not message:
self.disconnect()
return
self.handle(message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
self.on_disconnect()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def on_disconnect(self):
print('Connection closed')
def send(self, message):
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
| import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
message = yield from self.reader.readline()
self.handle(message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
self.on_disconnect()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def on_disconnect(self):
print('Connection closed')
def send(self, message):
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
| Move disconnect decision into Connection.handle | Move disconnect decision into Connection.handle
| Python | bsd-2-clause | meshy/framewirc | import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
message = yield from self.reader.readline()
if not message:
self.disconnect()
return
self.handle(message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
self.on_disconnect()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def on_disconnect(self):
print('Connection closed')
def send(self, message):
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
Move disconnect decision into Connection.handle | import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
message = yield from self.reader.readline()
self.handle(message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
self.on_disconnect()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def on_disconnect(self):
print('Connection closed')
def send(self, message):
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
| <commit_before>import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
message = yield from self.reader.readline()
if not message:
self.disconnect()
return
self.handle(message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
self.on_disconnect()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def on_disconnect(self):
print('Connection closed')
def send(self, message):
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
<commit_msg>Move disconnect decision into Connection.handle<commit_after> | import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
message = yield from self.reader.readline()
self.handle(message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
self.on_disconnect()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def on_disconnect(self):
print('Connection closed')
def send(self, message):
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
| import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
message = yield from self.reader.readline()
if not message:
self.disconnect()
return
self.handle(message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
self.on_disconnect()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def on_disconnect(self):
print('Connection closed')
def send(self, message):
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
Move disconnect decision into Connection.handleimport asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
message = yield from self.reader.readline()
self.handle(message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
self.on_disconnect()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def on_disconnect(self):
print('Connection closed')
def send(self, message):
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
| <commit_before>import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
message = yield from self.reader.readline()
if not message:
self.disconnect()
return
self.handle(message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
self.on_disconnect()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def on_disconnect(self):
print('Connection closed')
def send(self, message):
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
<commit_msg>Move disconnect decision into Connection.handle<commit_after>import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
message = yield from self.reader.readline()
self.handle(message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
self.on_disconnect()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def on_disconnect(self):
print('Connection closed')
def send(self, message):
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
|
eb7e36629a61515778029096370ccfb41399590f | bluebottle/activities/migrations/0018_auto_20200212_1025.py | bluebottle/activities/migrations/0018_auto_20200212_1025.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-12 09:25
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from django.contrib.auth.models import Permission, Group
def remove_anonymous_permissions(apps, schema_editor):
permissions = (
('assignments', 'api_read_assignment'),
('events', 'api_read_event'),
('activities', 'api_read_activity'),
('funding', 'api_read_funding'),
)
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
if properties.CLOSED_SITE:
anonymous = Group.objects.get(name='Anonymous')
for (app, codename) in permissions:
permission = Permission.objects.get(
content_type__app_label=app,
codename=codename
)
anonymous.permissions.remove(permission)
anonymous.save()
class Migration(migrations.Migration):
dependencies = [
('activities', '0017_auto_20200205_1054'),
]
operations = [
migrations.RunPython(remove_anonymous_permissions)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-12 09:25
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from django.contrib.auth.models import Permission, Group
def remove_anonymous_permissions(apps, schema_editor):
permissions = (
('assignments', 'api_read_assignment'),
('events', 'api_read_event'),
('activities', 'api_read_activity'),
('funding', 'api_read_funding'),
)
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
if properties.CLOSED_SITE:
anonymous = Group.objects.get(name='Anonymous')
for (app, codename) in permissions:
try:
permission = Permission.objects.get(
content_type__app_label=app,
codename=codename
)
anonymous.permissions.remove(permission)
except Permission.DoesNotExist:
pass
anonymous.save()
class Migration(migrations.Migration):
dependencies = [
('activities', '0017_auto_20200205_1054'),
]
operations = [
migrations.RunPython(remove_anonymous_permissions)
]
| Fix migration for new tenants | Fix migration for new tenants
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-12 09:25
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from django.contrib.auth.models import Permission, Group
def remove_anonymous_permissions(apps, schema_editor):
permissions = (
('assignments', 'api_read_assignment'),
('events', 'api_read_event'),
('activities', 'api_read_activity'),
('funding', 'api_read_funding'),
)
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
if properties.CLOSED_SITE:
anonymous = Group.objects.get(name='Anonymous')
for (app, codename) in permissions:
permission = Permission.objects.get(
content_type__app_label=app,
codename=codename
)
anonymous.permissions.remove(permission)
anonymous.save()
class Migration(migrations.Migration):
dependencies = [
('activities', '0017_auto_20200205_1054'),
]
operations = [
migrations.RunPython(remove_anonymous_permissions)
]
Fix migration for new tenants | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-12 09:25
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from django.contrib.auth.models import Permission, Group
def remove_anonymous_permissions(apps, schema_editor):
permissions = (
('assignments', 'api_read_assignment'),
('events', 'api_read_event'),
('activities', 'api_read_activity'),
('funding', 'api_read_funding'),
)
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
if properties.CLOSED_SITE:
anonymous = Group.objects.get(name='Anonymous')
for (app, codename) in permissions:
try:
permission = Permission.objects.get(
content_type__app_label=app,
codename=codename
)
anonymous.permissions.remove(permission)
except Permission.DoesNotExist:
pass
anonymous.save()
class Migration(migrations.Migration):
dependencies = [
('activities', '0017_auto_20200205_1054'),
]
operations = [
migrations.RunPython(remove_anonymous_permissions)
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-12 09:25
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from django.contrib.auth.models import Permission, Group
def remove_anonymous_permissions(apps, schema_editor):
permissions = (
('assignments', 'api_read_assignment'),
('events', 'api_read_event'),
('activities', 'api_read_activity'),
('funding', 'api_read_funding'),
)
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
if properties.CLOSED_SITE:
anonymous = Group.objects.get(name='Anonymous')
for (app, codename) in permissions:
permission = Permission.objects.get(
content_type__app_label=app,
codename=codename
)
anonymous.permissions.remove(permission)
anonymous.save()
class Migration(migrations.Migration):
dependencies = [
('activities', '0017_auto_20200205_1054'),
]
operations = [
migrations.RunPython(remove_anonymous_permissions)
]
<commit_msg>Fix migration for new tenants<commit_after> | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-12 09:25
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from django.contrib.auth.models import Permission, Group
def remove_anonymous_permissions(apps, schema_editor):
permissions = (
('assignments', 'api_read_assignment'),
('events', 'api_read_event'),
('activities', 'api_read_activity'),
('funding', 'api_read_funding'),
)
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
if properties.CLOSED_SITE:
anonymous = Group.objects.get(name='Anonymous')
for (app, codename) in permissions:
try:
permission = Permission.objects.get(
content_type__app_label=app,
codename=codename
)
anonymous.permissions.remove(permission)
except Permission.DoesNotExist:
pass
anonymous.save()
class Migration(migrations.Migration):
dependencies = [
('activities', '0017_auto_20200205_1054'),
]
operations = [
migrations.RunPython(remove_anonymous_permissions)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-12 09:25
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from django.contrib.auth.models import Permission, Group
def remove_anonymous_permissions(apps, schema_editor):
permissions = (
('assignments', 'api_read_assignment'),
('events', 'api_read_event'),
('activities', 'api_read_activity'),
('funding', 'api_read_funding'),
)
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
if properties.CLOSED_SITE:
anonymous = Group.objects.get(name='Anonymous')
for (app, codename) in permissions:
permission = Permission.objects.get(
content_type__app_label=app,
codename=codename
)
anonymous.permissions.remove(permission)
anonymous.save()
class Migration(migrations.Migration):
dependencies = [
('activities', '0017_auto_20200205_1054'),
]
operations = [
migrations.RunPython(remove_anonymous_permissions)
]
Fix migration for new tenants# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-12 09:25
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from django.contrib.auth.models import Permission, Group
def remove_anonymous_permissions(apps, schema_editor):
permissions = (
('assignments', 'api_read_assignment'),
('events', 'api_read_event'),
('activities', 'api_read_activity'),
('funding', 'api_read_funding'),
)
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
if properties.CLOSED_SITE:
anonymous = Group.objects.get(name='Anonymous')
for (app, codename) in permissions:
try:
permission = Permission.objects.get(
content_type__app_label=app,
codename=codename
)
anonymous.permissions.remove(permission)
except Permission.DoesNotExist:
pass
anonymous.save()
class Migration(migrations.Migration):
dependencies = [
('activities', '0017_auto_20200205_1054'),
]
operations = [
migrations.RunPython(remove_anonymous_permissions)
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-12 09:25
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from django.contrib.auth.models import Permission, Group
def remove_anonymous_permissions(apps, schema_editor):
permissions = (
('assignments', 'api_read_assignment'),
('events', 'api_read_event'),
('activities', 'api_read_activity'),
('funding', 'api_read_funding'),
)
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
if properties.CLOSED_SITE:
anonymous = Group.objects.get(name='Anonymous')
for (app, codename) in permissions:
permission = Permission.objects.get(
content_type__app_label=app,
codename=codename
)
anonymous.permissions.remove(permission)
anonymous.save()
class Migration(migrations.Migration):
dependencies = [
('activities', '0017_auto_20200205_1054'),
]
operations = [
migrations.RunPython(remove_anonymous_permissions)
]
<commit_msg>Fix migration for new tenants<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-02-12 09:25
from __future__ import unicode_literals
from django.db import migrations, connection
from bluebottle.clients import properties
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from django.contrib.auth.models import Permission, Group
def remove_anonymous_permissions(apps, schema_editor):
permissions = (
('assignments', 'api_read_assignment'),
('events', 'api_read_event'),
('activities', 'api_read_activity'),
('funding', 'api_read_funding'),
)
tenant = Client.objects.get(schema_name=connection.tenant.schema_name)
with LocalTenant(tenant):
if properties.CLOSED_SITE:
anonymous = Group.objects.get(name='Anonymous')
for (app, codename) in permissions:
try:
permission = Permission.objects.get(
content_type__app_label=app,
codename=codename
)
anonymous.permissions.remove(permission)
except Permission.DoesNotExist:
pass
anonymous.save()
class Migration(migrations.Migration):
dependencies = [
('activities', '0017_auto_20200205_1054'),
]
operations = [
migrations.RunPython(remove_anonymous_permissions)
]
|
b966f81af56d9f68414e72d30b0e3b3a49011ac4 | node/lookup.py | node/lookup.py | import zmq
class QueryIdent:
def __init__(self):
self._ctx = zmq.Context()
self._socket = self._ctx.socket(zmq.REQ)
# Point to OpenBazaar Identity server for now
self._socket.connect("tcp://seed.openbazaar.org:5558")
def lookup(self, user):
self._socket.send(user)
key = self._socket.recv()
print user
if key == "__NONE__":
return None
return key
if __name__ == "__main__":
query = QueryIdent()
print query.lookup("caedes").encode("hex")
| import zmq
class QueryIdent:
def __init__(self):
self._ctx = zmq.Context()
self._socket = self._ctx.socket(zmq.REQ)
# Point to OpenBazaar Identity server for now
self._socket.connect("tcp://seed.openbazaar.org:5558")
def lookup(self, user):
self._socket.send(user)
key = self._socket.recv()
print user
if key == "__NONE__":
return None
return key
if __name__ == "__main__":
query = QueryIdent()
| Remove default cedes pointless search | Remove default cedes pointless search
| Python | mit | yagoulas/OpenBazaar,tortxof/OpenBazaar,im0rtel/OpenBazaar,tortxof/OpenBazaar,akhavr/OpenBazaar,saltduck/OpenBazaar,dlcorporation/openbazaar,rllola/OpenBazaar,STRML/OpenBazaar,must-/OpenBazaar,kordless/OpenBazaar,bankonme/OpenBazaar,kujenga/OpenBazaar,tortxof/OpenBazaar,freebazaar/FreeBazaar,dlcorporation/openbazaar,Renelvon/OpenBazaar,NolanZhao/OpenBazaar,dionyziz/OpenBazaar,freebazaar/FreeBazaar,zenhacklab/OpenBazaar,atsuyim/OpenBazaar,tortxof/OpenBazaar,dlcorporation/openbazaar,STRML/OpenBazaar,rllola/OpenBazaar,bankonme/OpenBazaar,habibmasuro/OpenBazaar,hoffmabc/OpenBazaar,NolanZhao/OpenBazaar,hoffmabc/OpenBazaar,im0rtel/OpenBazaar,eXcomm/OpenBazaar,habibmasuro/OpenBazaar,dlcorporation/openbazaar,freebazaar/FreeBazaar,Renelvon/OpenBazaar,STRML/OpenBazaar,blakejakopovic/OpenBazaar,blakejakopovic/OpenBazaar,hoffmabc/OpenBazaar,yagoulas/OpenBazaar,mirrax/OpenBazaar,hoffmabc/OpenBazaar,mirrax/OpenBazaar,dionyziz/OpenBazaar,bglassy/OpenBazaar,must-/OpenBazaar,hoffmabc/OpenBazaar,atsuyim/OpenBazaar,Renelvon/OpenBazaar,blakejakopovic/OpenBazaar,zenhacklab/OpenBazaar,atsuyim/OpenBazaar,NolanZhao/OpenBazaar,dionyziz/OpenBazaar,dionyziz/OpenBazaar,akhavr/OpenBazaar,eXcomm/OpenBazaar,bglassy/OpenBazaar,bglassy/OpenBazaar,mirrax/OpenBazaar,bglassy/OpenBazaar,freebazaar/FreeBazaar,matiasbastos/OpenBazaar,habibmasuro/OpenBazaar,zenhacklab/OpenBazaar,im0rtel/OpenBazaar,kordless/OpenBazaar,STRML/OpenBazaar,must-/OpenBazaar,hoffmabc/OpenBazaar,eXcomm/OpenBazaar,kujenga/OpenBazaar,akhavr/OpenBazaar,must-/OpenBazaar,zenhacklab/OpenBazaar,rllola/OpenBazaar,zenhacklab/OpenBazaar,akhavr/OpenBazaar,atsuyim/OpenBazaar,kordless/OpenBazaar,dlcorporation/openbazaar,rllola/OpenBazaar,kordless/OpenBazaar,akhavr/OpenBazaar,yagoulas/OpenBazaar,saltduck/OpenBazaar,eXcomm/OpenBazaar,matiasbastos/OpenBazaar,eXcomm/OpenBazaar,matiasbastos/OpenBazaar,saltduck/OpenBazaar,dlcorporation/openbazaar,bankonme/OpenBazaar,im0rtel/OpenBazaar,bankonme/OpenBazaar,yagoulas/OpenBazaar,matiasbas
tos/OpenBazaar,kujenga/OpenBazaar,Renelvon/OpenBazaar,freebazaar/FreeBazaar,dionyziz/OpenBazaar,mirrax/OpenBazaar,blakejakopovic/OpenBazaar,NolanZhao/OpenBazaar,kujenga/OpenBazaar,habibmasuro/OpenBazaar,saltduck/OpenBazaar | import zmq
class QueryIdent:
def __init__(self):
self._ctx = zmq.Context()
self._socket = self._ctx.socket(zmq.REQ)
# Point to OpenBazaar Identity server for now
self._socket.connect("tcp://seed.openbazaar.org:5558")
def lookup(self, user):
self._socket.send(user)
key = self._socket.recv()
print user
if key == "__NONE__":
return None
return key
if __name__ == "__main__":
query = QueryIdent()
print query.lookup("caedes").encode("hex")
Remove default cedes pointless search | import zmq
class QueryIdent:
def __init__(self):
self._ctx = zmq.Context()
self._socket = self._ctx.socket(zmq.REQ)
# Point to OpenBazaar Identity server for now
self._socket.connect("tcp://seed.openbazaar.org:5558")
def lookup(self, user):
self._socket.send(user)
key = self._socket.recv()
print user
if key == "__NONE__":
return None
return key
if __name__ == "__main__":
query = QueryIdent()
| <commit_before>import zmq
class QueryIdent:
def __init__(self):
self._ctx = zmq.Context()
self._socket = self._ctx.socket(zmq.REQ)
# Point to OpenBazaar Identity server for now
self._socket.connect("tcp://seed.openbazaar.org:5558")
def lookup(self, user):
self._socket.send(user)
key = self._socket.recv()
print user
if key == "__NONE__":
return None
return key
if __name__ == "__main__":
query = QueryIdent()
print query.lookup("caedes").encode("hex")
<commit_msg>Remove default cedes pointless search<commit_after> | import zmq
class QueryIdent:
def __init__(self):
self._ctx = zmq.Context()
self._socket = self._ctx.socket(zmq.REQ)
# Point to OpenBazaar Identity server for now
self._socket.connect("tcp://seed.openbazaar.org:5558")
def lookup(self, user):
self._socket.send(user)
key = self._socket.recv()
print user
if key == "__NONE__":
return None
return key
if __name__ == "__main__":
query = QueryIdent()
| import zmq
class QueryIdent:
def __init__(self):
self._ctx = zmq.Context()
self._socket = self._ctx.socket(zmq.REQ)
# Point to OpenBazaar Identity server for now
self._socket.connect("tcp://seed.openbazaar.org:5558")
def lookup(self, user):
self._socket.send(user)
key = self._socket.recv()
print user
if key == "__NONE__":
return None
return key
if __name__ == "__main__":
query = QueryIdent()
print query.lookup("caedes").encode("hex")
Remove default cedes pointless searchimport zmq
class QueryIdent:
def __init__(self):
self._ctx = zmq.Context()
self._socket = self._ctx.socket(zmq.REQ)
# Point to OpenBazaar Identity server for now
self._socket.connect("tcp://seed.openbazaar.org:5558")
def lookup(self, user):
self._socket.send(user)
key = self._socket.recv()
print user
if key == "__NONE__":
return None
return key
if __name__ == "__main__":
query = QueryIdent()
| <commit_before>import zmq
class QueryIdent:
def __init__(self):
self._ctx = zmq.Context()
self._socket = self._ctx.socket(zmq.REQ)
# Point to OpenBazaar Identity server for now
self._socket.connect("tcp://seed.openbazaar.org:5558")
def lookup(self, user):
self._socket.send(user)
key = self._socket.recv()
print user
if key == "__NONE__":
return None
return key
if __name__ == "__main__":
query = QueryIdent()
print query.lookup("caedes").encode("hex")
<commit_msg>Remove default cedes pointless search<commit_after>import zmq
class QueryIdent:
def __init__(self):
self._ctx = zmq.Context()
self._socket = self._ctx.socket(zmq.REQ)
# Point to OpenBazaar Identity server for now
self._socket.connect("tcp://seed.openbazaar.org:5558")
def lookup(self, user):
self._socket.send(user)
key = self._socket.recv()
print user
if key == "__NONE__":
return None
return key
if __name__ == "__main__":
query = QueryIdent()
|
dce249d7b14c8d6438f336a3f6e34c6c62b29533 | cla_backend/urls.py | cla_backend/urls.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.ADMIN_ENABLED:
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns += patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/reports/', include('reports.urls', namespace='reports')),
)
if settings.BACKEND_ENABLED:
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^checker/api/v1/', include('checker.urls', namespace='checker')),
url(r'^call_centre/api/v1/', include('call_centre.urls', namespace='call_centre')),
url(r'^cla_provider/api/v1/', include('cla_provider.urls', namespace='cla_provider')),
url(r'^oauth2/', include('cla_auth.urls', namespace='oauth2')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^means_test/api/v1/', include('means_test_api.urls', namespace='means_test')),
)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.ADMIN_ENABLED:
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/reports/', include('reports.urls', namespace='reports')),
)
if settings.BACKEND_ENABLED:
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^checker/api/v1/', include('checker.urls', namespace='checker')),
url(r'^call_centre/api/v1/', include('call_centre.urls', namespace='call_centre')),
url(r'^cla_provider/api/v1/', include('cla_provider.urls', namespace='cla_provider')),
url(r'^oauth2/', include('cla_auth.urls', namespace='oauth2')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^means_test/api/v1/', include('means_test_api.urls', namespace='means_test')),
)
| Add status endpoint to admin server | Add status endpoint to admin server
| Python | mit | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.ADMIN_ENABLED:
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns += patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/reports/', include('reports.urls', namespace='reports')),
)
if settings.BACKEND_ENABLED:
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^checker/api/v1/', include('checker.urls', namespace='checker')),
url(r'^call_centre/api/v1/', include('call_centre.urls', namespace='call_centre')),
url(r'^cla_provider/api/v1/', include('cla_provider.urls', namespace='cla_provider')),
url(r'^oauth2/', include('cla_auth.urls', namespace='oauth2')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^means_test/api/v1/', include('means_test_api.urls', namespace='means_test')),
)
Add status endpoint to admin server | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.ADMIN_ENABLED:
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/reports/', include('reports.urls', namespace='reports')),
)
if settings.BACKEND_ENABLED:
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^checker/api/v1/', include('checker.urls', namespace='checker')),
url(r'^call_centre/api/v1/', include('call_centre.urls', namespace='call_centre')),
url(r'^cla_provider/api/v1/', include('cla_provider.urls', namespace='cla_provider')),
url(r'^oauth2/', include('cla_auth.urls', namespace='oauth2')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^means_test/api/v1/', include('means_test_api.urls', namespace='means_test')),
)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.ADMIN_ENABLED:
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns += patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/reports/', include('reports.urls', namespace='reports')),
)
if settings.BACKEND_ENABLED:
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^checker/api/v1/', include('checker.urls', namespace='checker')),
url(r'^call_centre/api/v1/', include('call_centre.urls', namespace='call_centre')),
url(r'^cla_provider/api/v1/', include('cla_provider.urls', namespace='cla_provider')),
url(r'^oauth2/', include('cla_auth.urls', namespace='oauth2')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^means_test/api/v1/', include('means_test_api.urls', namespace='means_test')),
)
<commit_msg>Add status endpoint to admin server<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.ADMIN_ENABLED:
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/reports/', include('reports.urls', namespace='reports')),
)
if settings.BACKEND_ENABLED:
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^checker/api/v1/', include('checker.urls', namespace='checker')),
url(r'^call_centre/api/v1/', include('call_centre.urls', namespace='call_centre')),
url(r'^cla_provider/api/v1/', include('cla_provider.urls', namespace='cla_provider')),
url(r'^oauth2/', include('cla_auth.urls', namespace='oauth2')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^means_test/api/v1/', include('means_test_api.urls', namespace='means_test')),
)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.ADMIN_ENABLED:
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns += patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/reports/', include('reports.urls', namespace='reports')),
)
if settings.BACKEND_ENABLED:
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^checker/api/v1/', include('checker.urls', namespace='checker')),
url(r'^call_centre/api/v1/', include('call_centre.urls', namespace='call_centre')),
url(r'^cla_provider/api/v1/', include('cla_provider.urls', namespace='cla_provider')),
url(r'^oauth2/', include('cla_auth.urls', namespace='oauth2')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^means_test/api/v1/', include('means_test_api.urls', namespace='means_test')),
)
Add status endpoint to admin server# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.ADMIN_ENABLED:
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/reports/', include('reports.urls', namespace='reports')),
)
if settings.BACKEND_ENABLED:
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^checker/api/v1/', include('checker.urls', namespace='checker')),
url(r'^call_centre/api/v1/', include('call_centre.urls', namespace='call_centre')),
url(r'^cla_provider/api/v1/', include('cla_provider.urls', namespace='cla_provider')),
url(r'^oauth2/', include('cla_auth.urls', namespace='oauth2')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^means_test/api/v1/', include('means_test_api.urls', namespace='means_test')),
)
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.ADMIN_ENABLED:
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns += patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/reports/', include('reports.urls', namespace='reports')),
)
if settings.BACKEND_ENABLED:
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^checker/api/v1/', include('checker.urls', namespace='checker')),
url(r'^call_centre/api/v1/', include('call_centre.urls', namespace='call_centre')),
url(r'^cla_provider/api/v1/', include('cla_provider.urls', namespace='cla_provider')),
url(r'^oauth2/', include('cla_auth.urls', namespace='oauth2')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^means_test/api/v1/', include('means_test_api.urls', namespace='means_test')),
)
<commit_msg>Add status endpoint to admin server<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
urlpatterns = static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.ADMIN_ENABLED:
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/reports/', include('reports.urls', namespace='reports')),
)
if settings.BACKEND_ENABLED:
urlpatterns += patterns(
'',
url(r'^status/', include('status.urls', namespace='status')),
url(r'^checker/api/v1/', include('checker.urls', namespace='checker')),
url(r'^call_centre/api/v1/', include('call_centre.urls', namespace='call_centre')),
url(r'^cla_provider/api/v1/', include('cla_provider.urls', namespace='cla_provider')),
url(r'^oauth2/', include('cla_auth.urls', namespace='oauth2')),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^means_test/api/v1/', include('means_test_api.urls', namespace='means_test')),
)
|
e93fa13dc27e0590786d9b12d40145c19dbd3794 | podium/talks/models.py | podium/talks/models.py | from django.db import models
TALK_STATUS_CHOICES = (
('S', 'Submitted'),
('A', 'Approved'),
('R', 'Rejected'),
('C', 'Confirmed'),
)
class Talk(models.Model):
speaker_name = models.CharField(max_length=1000)
speaker_email = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
description = models.TextField()
sessions_available = models.ManyToManyField(
'Session', related_name='talks_available')
status = models.CharField(
max_length=1, choices=TALK_STATUS_CHOICES,
default='S')
def get_absolute_url(self):
return f'/talks/talks/{self.id}/'
def __str__(self):
return self.speaker_name
class Session(models.Model):
date = models.DateField()
description = models.TextField(
blank=True, help_text='Any special theme or info about the session.')
def __str__(self):
return '{} - {} '.format(self.date, self.description)
def approved_talks(self):
sets = [
self.talks_available.filter(status=status) for status in ('A', 'C')
]
return sets[0].union(sets[1])
def get_absolute_url(self):
return f'/talks/sessions/{self.id}/'
| from django.db import models
from django.urls import reverse
TALK_STATUS_CHOICES = (
('S', 'Submitted'),
('A', 'Approved'),
('R', 'Rejected'),
('C', 'Confirmed'),
)
class Talk(models.Model):
speaker_name = models.CharField(max_length=1000)
speaker_email = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
description = models.TextField()
sessions_available = models.ManyToManyField(
'Session', related_name='talks_available')
status = models.CharField(
max_length=1, choices=TALK_STATUS_CHOICES,
default='S')
def get_absolute_url(self):
return reverse('talks-talks-id', args=[self.id])
def __str__(self):
return self.speaker_name
class Session(models.Model):
date = models.DateField()
description = models.TextField(
blank=True, help_text='Any special theme or info about the session.')
def __str__(self):
return '{} - {} '.format(self.date, self.description)
def approved_talks(self):
sets = [
self.talks_available.filter(status=status) for status in ('A', 'C')
]
return sets[0].union(sets[1])
def get_absolute_url(self):
return reverse('talks-sessions-id', args=[self.id])
| Use reverse to implement get_absolute_url. | Use reverse to implement get_absolute_url.
| Python | mit | pyatl/podium-django,pyatl/podium-django,pyatl/podium-django | from django.db import models
TALK_STATUS_CHOICES = (
('S', 'Submitted'),
('A', 'Approved'),
('R', 'Rejected'),
('C', 'Confirmed'),
)
class Talk(models.Model):
speaker_name = models.CharField(max_length=1000)
speaker_email = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
description = models.TextField()
sessions_available = models.ManyToManyField(
'Session', related_name='talks_available')
status = models.CharField(
max_length=1, choices=TALK_STATUS_CHOICES,
default='S')
def get_absolute_url(self):
return f'/talks/talks/{self.id}/'
def __str__(self):
return self.speaker_name
class Session(models.Model):
date = models.DateField()
description = models.TextField(
blank=True, help_text='Any special theme or info about the session.')
def __str__(self):
return '{} - {} '.format(self.date, self.description)
def approved_talks(self):
sets = [
self.talks_available.filter(status=status) for status in ('A', 'C')
]
return sets[0].union(sets[1])
def get_absolute_url(self):
return f'/talks/sessions/{self.id}/'
Use reverse to implement get_absolute_url. | from django.db import models
from django.urls import reverse
TALK_STATUS_CHOICES = (
('S', 'Submitted'),
('A', 'Approved'),
('R', 'Rejected'),
('C', 'Confirmed'),
)
class Talk(models.Model):
speaker_name = models.CharField(max_length=1000)
speaker_email = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
description = models.TextField()
sessions_available = models.ManyToManyField(
'Session', related_name='talks_available')
status = models.CharField(
max_length=1, choices=TALK_STATUS_CHOICES,
default='S')
def get_absolute_url(self):
return reverse('talks-talks-id', args=[self.id])
def __str__(self):
return self.speaker_name
class Session(models.Model):
date = models.DateField()
description = models.TextField(
blank=True, help_text='Any special theme or info about the session.')
def __str__(self):
return '{} - {} '.format(self.date, self.description)
def approved_talks(self):
sets = [
self.talks_available.filter(status=status) for status in ('A', 'C')
]
return sets[0].union(sets[1])
def get_absolute_url(self):
return reverse('talks-sessions-id', args=[self.id])
| <commit_before>from django.db import models
TALK_STATUS_CHOICES = (
('S', 'Submitted'),
('A', 'Approved'),
('R', 'Rejected'),
('C', 'Confirmed'),
)
class Talk(models.Model):
speaker_name = models.CharField(max_length=1000)
speaker_email = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
description = models.TextField()
sessions_available = models.ManyToManyField(
'Session', related_name='talks_available')
status = models.CharField(
max_length=1, choices=TALK_STATUS_CHOICES,
default='S')
def get_absolute_url(self):
return f'/talks/talks/{self.id}/'
def __str__(self):
return self.speaker_name
class Session(models.Model):
date = models.DateField()
description = models.TextField(
blank=True, help_text='Any special theme or info about the session.')
def __str__(self):
return '{} - {} '.format(self.date, self.description)
def approved_talks(self):
sets = [
self.talks_available.filter(status=status) for status in ('A', 'C')
]
return sets[0].union(sets[1])
def get_absolute_url(self):
return f'/talks/sessions/{self.id}/'
<commit_msg>Use reverse to implement get_absolute_url.<commit_after> | from django.db import models
from django.urls import reverse
TALK_STATUS_CHOICES = (
('S', 'Submitted'),
('A', 'Approved'),
('R', 'Rejected'),
('C', 'Confirmed'),
)
class Talk(models.Model):
speaker_name = models.CharField(max_length=1000)
speaker_email = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
description = models.TextField()
sessions_available = models.ManyToManyField(
'Session', related_name='talks_available')
status = models.CharField(
max_length=1, choices=TALK_STATUS_CHOICES,
default='S')
def get_absolute_url(self):
return reverse('talks-talks-id', args=[self.id])
def __str__(self):
return self.speaker_name
class Session(models.Model):
date = models.DateField()
description = models.TextField(
blank=True, help_text='Any special theme or info about the session.')
def __str__(self):
return '{} - {} '.format(self.date, self.description)
def approved_talks(self):
sets = [
self.talks_available.filter(status=status) for status in ('A', 'C')
]
return sets[0].union(sets[1])
def get_absolute_url(self):
return reverse('talks-sessions-id', args=[self.id])
| from django.db import models
TALK_STATUS_CHOICES = (
('S', 'Submitted'),
('A', 'Approved'),
('R', 'Rejected'),
('C', 'Confirmed'),
)
class Talk(models.Model):
speaker_name = models.CharField(max_length=1000)
speaker_email = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
description = models.TextField()
sessions_available = models.ManyToManyField(
'Session', related_name='talks_available')
status = models.CharField(
max_length=1, choices=TALK_STATUS_CHOICES,
default='S')
def get_absolute_url(self):
return f'/talks/talks/{self.id}/'
def __str__(self):
return self.speaker_name
class Session(models.Model):
date = models.DateField()
description = models.TextField(
blank=True, help_text='Any special theme or info about the session.')
def __str__(self):
return '{} - {} '.format(self.date, self.description)
def approved_talks(self):
sets = [
self.talks_available.filter(status=status) for status in ('A', 'C')
]
return sets[0].union(sets[1])
def get_absolute_url(self):
return f'/talks/sessions/{self.id}/'
Use reverse to implement get_absolute_url.from django.db import models
from django.urls import reverse
TALK_STATUS_CHOICES = (
('S', 'Submitted'),
('A', 'Approved'),
('R', 'Rejected'),
('C', 'Confirmed'),
)
class Talk(models.Model):
speaker_name = models.CharField(max_length=1000)
speaker_email = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
description = models.TextField()
sessions_available = models.ManyToManyField(
'Session', related_name='talks_available')
status = models.CharField(
max_length=1, choices=TALK_STATUS_CHOICES,
default='S')
def get_absolute_url(self):
return reverse('talks-talks-id', args=[self.id])
def __str__(self):
return self.speaker_name
class Session(models.Model):
date = models.DateField()
description = models.TextField(
blank=True, help_text='Any special theme or info about the session.')
def __str__(self):
return '{} - {} '.format(self.date, self.description)
def approved_talks(self):
sets = [
self.talks_available.filter(status=status) for status in ('A', 'C')
]
return sets[0].union(sets[1])
def get_absolute_url(self):
return reverse('talks-sessions-id', args=[self.id])
| <commit_before>from django.db import models
TALK_STATUS_CHOICES = (
('S', 'Submitted'),
('A', 'Approved'),
('R', 'Rejected'),
('C', 'Confirmed'),
)
class Talk(models.Model):
speaker_name = models.CharField(max_length=1000)
speaker_email = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
description = models.TextField()
sessions_available = models.ManyToManyField(
'Session', related_name='talks_available')
status = models.CharField(
max_length=1, choices=TALK_STATUS_CHOICES,
default='S')
def get_absolute_url(self):
return f'/talks/talks/{self.id}/'
def __str__(self):
return self.speaker_name
class Session(models.Model):
date = models.DateField()
description = models.TextField(
blank=True, help_text='Any special theme or info about the session.')
def __str__(self):
return '{} - {} '.format(self.date, self.description)
def approved_talks(self):
sets = [
self.talks_available.filter(status=status) for status in ('A', 'C')
]
return sets[0].union(sets[1])
def get_absolute_url(self):
return f'/talks/sessions/{self.id}/'
<commit_msg>Use reverse to implement get_absolute_url.<commit_after>from django.db import models
from django.urls import reverse
TALK_STATUS_CHOICES = (
('S', 'Submitted'),
('A', 'Approved'),
('R', 'Rejected'),
('C', 'Confirmed'),
)
class Talk(models.Model):
speaker_name = models.CharField(max_length=1000)
speaker_email = models.CharField(max_length=1000)
title = models.CharField(max_length=1000)
description = models.TextField()
sessions_available = models.ManyToManyField(
'Session', related_name='talks_available')
status = models.CharField(
max_length=1, choices=TALK_STATUS_CHOICES,
default='S')
def get_absolute_url(self):
return reverse('talks-talks-id', args=[self.id])
def __str__(self):
return self.speaker_name
class Session(models.Model):
date = models.DateField()
description = models.TextField(
blank=True, help_text='Any special theme or info about the session.')
def __str__(self):
return '{} - {} '.format(self.date, self.description)
def approved_talks(self):
sets = [
self.talks_available.filter(status=status) for status in ('A', 'C')
]
return sets[0].union(sets[1])
def get_absolute_url(self):
return reverse('talks-sessions-id', args=[self.id])
|
77492f53bf718d01fe6166f2a2e1f57203ce6852 | class4/exercise5.py | class4/exercise5.py | from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
| # Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
| Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'. | Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.
| Python | apache-2.0 | linkdebian/pynet_course | from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'. | # Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
| <commit_before>from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
<commit_msg>Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.<commit_after> | # Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
| from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.# Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
| <commit_before>from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
<commit_msg>Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.<commit_after># Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
|
bd0c2f19558033e68a5272dd84b153ff3f6fc9b3 | py_controller_client/src/py_controller_client/waypoint_client.py | py_controller_client/src/py_controller_client/waypoint_client.py | #! /usr/bin/env python
import rospy
import actionlib
import cpp_controller_msgs.msg
def waypoint_client():
pass
if __name__ == '__main__':
try:
rospy.init_node("waypoint_client_py")
result = waypoint_client()
print "Result:", result
except rospy.ROSInterruptException as e:
print "Client interrupted before completion!", str(e)
| #! /usr/bin/env python
import rospy
import actionlib
from cpp_controller_msgs.msg import *
from geometry_msgs.msg import Pose2D
def waypoint_client(waypoints = []):
# Create the client, passing the type of the action to the constructor.
client = actionlib.SimpleActionClient("waypoint_following",
WaypointFollowingAction)
# Wait until the action server has started up.
client.wait_for_server()
# Create a goal to be sent to the action server.
action_goal = WaypointFollowingGoal()
# Fill out the request part of the message.
action_goal.waypoints = waypoints
# Send the goal to the action server.
client.send_goal(action_goal)
# Wait for the server to finish performing the action.
client.wait_for_result()
return client.get_state()
if __name__ == '__main__':
waypoints = list()
waypoints.append(Pose2D(x = 9.0, y = 1.0))
waypoints.append(Pose2D(x = 9.0, y = 9.0))
waypoints.append(Pose2D(x = 1.0, y = 9.0))
waypoints.append(Pose2D(x = 1.0, y = 1.0))
for wp in waypoints:
print wp
try:
rospy.init_node("waypoint_client_py")
result = waypoint_client(waypoints)
print "Result:", result, ("SUCCEEDED = 3")
except rospy.ROSInterruptException as e:
print "Client interrupted before completion!", str(e)
| Add a simple Python action client | [py_controller_client] Add a simple Python action client
| Python | bsd-3-clause | spmaniato/cs2024_ros_cpp_project,spmaniato/cs2024_ros_cpp_project | #! /usr/bin/env python
import rospy
import actionlib
import cpp_controller_msgs.msg
def waypoint_client():
pass
if __name__ == '__main__':
try:
rospy.init_node("waypoint_client_py")
result = waypoint_client()
print "Result:", result
except rospy.ROSInterruptException as e:
print "Client interrupted before completion!", str(e)
[py_controller_client] Add a simple Python action client | #! /usr/bin/env python
import rospy
import actionlib
from cpp_controller_msgs.msg import *
from geometry_msgs.msg import Pose2D
def waypoint_client(waypoints = []):
# Create the client, passing the type of the action to the constructor.
client = actionlib.SimpleActionClient("waypoint_following",
WaypointFollowingAction)
# Wait until the action server has started up.
client.wait_for_server()
# Create a goal to be sent to the action server.
action_goal = WaypointFollowingGoal()
# Fill out the request part of the message.
action_goal.waypoints = waypoints
# Send the goal to the action server.
client.send_goal(action_goal)
# Wait for the server to finish performing the action.
client.wait_for_result()
return client.get_state()
if __name__ == '__main__':
waypoints = list()
waypoints.append(Pose2D(x = 9.0, y = 1.0))
waypoints.append(Pose2D(x = 9.0, y = 9.0))
waypoints.append(Pose2D(x = 1.0, y = 9.0))
waypoints.append(Pose2D(x = 1.0, y = 1.0))
for wp in waypoints:
print wp
try:
rospy.init_node("waypoint_client_py")
result = waypoint_client(waypoints)
print "Result:", result, ("SUCCEEDED = 3")
except rospy.ROSInterruptException as e:
print "Client interrupted before completion!", str(e)
| <commit_before>#! /usr/bin/env python
import rospy
import actionlib
import cpp_controller_msgs.msg
def waypoint_client():
pass
if __name__ == '__main__':
try:
rospy.init_node("waypoint_client_py")
result = waypoint_client()
print "Result:", result
except rospy.ROSInterruptException as e:
print "Client interrupted before completion!", str(e)
<commit_msg>[py_controller_client] Add a simple Python action client<commit_after> | #! /usr/bin/env python
import rospy
import actionlib
from cpp_controller_msgs.msg import *
from geometry_msgs.msg import Pose2D
def waypoint_client(waypoints = []):
# Create the client, passing the type of the action to the constructor.
client = actionlib.SimpleActionClient("waypoint_following",
WaypointFollowingAction)
# Wait until the action server has started up.
client.wait_for_server()
# Create a goal to be sent to the action server.
action_goal = WaypointFollowingGoal()
# Fill out the request part of the message.
action_goal.waypoints = waypoints
# Send the goal to the action server.
client.send_goal(action_goal)
# Wait for the server to finish performing the action.
client.wait_for_result()
return client.get_state()
if __name__ == '__main__':
waypoints = list()
waypoints.append(Pose2D(x = 9.0, y = 1.0))
waypoints.append(Pose2D(x = 9.0, y = 9.0))
waypoints.append(Pose2D(x = 1.0, y = 9.0))
waypoints.append(Pose2D(x = 1.0, y = 1.0))
for wp in waypoints:
print wp
try:
rospy.init_node("waypoint_client_py")
result = waypoint_client(waypoints)
print "Result:", result, ("SUCCEEDED = 3")
except rospy.ROSInterruptException as e:
print "Client interrupted before completion!", str(e)
| #! /usr/bin/env python
import rospy
import actionlib
import cpp_controller_msgs.msg
def waypoint_client():
pass
if __name__ == '__main__':
try:
rospy.init_node("waypoint_client_py")
result = waypoint_client()
print "Result:", result
except rospy.ROSInterruptException as e:
print "Client interrupted before completion!", str(e)
[py_controller_client] Add a simple Python action client#! /usr/bin/env python
import rospy
import actionlib
from cpp_controller_msgs.msg import *
from geometry_msgs.msg import Pose2D
def waypoint_client(waypoints = []):
# Create the client, passing the type of the action to the constructor.
client = actionlib.SimpleActionClient("waypoint_following",
WaypointFollowingAction)
# Wait until the action server has started up.
client.wait_for_server()
# Create a goal to be sent to the action server.
action_goal = WaypointFollowingGoal()
# Fill out the request part of the message.
action_goal.waypoints = waypoints
# Send the goal to the action server.
client.send_goal(action_goal)
# Wait for the server to finish performing the action.
client.wait_for_result()
return client.get_state()
if __name__ == '__main__':
waypoints = list()
waypoints.append(Pose2D(x = 9.0, y = 1.0))
waypoints.append(Pose2D(x = 9.0, y = 9.0))
waypoints.append(Pose2D(x = 1.0, y = 9.0))
waypoints.append(Pose2D(x = 1.0, y = 1.0))
for wp in waypoints:
print wp
try:
rospy.init_node("waypoint_client_py")
result = waypoint_client(waypoints)
print "Result:", result, ("SUCCEEDED = 3")
except rospy.ROSInterruptException as e:
print "Client interrupted before completion!", str(e)
| <commit_before>#! /usr/bin/env python
import rospy
import actionlib
import cpp_controller_msgs.msg
def waypoint_client():
pass
if __name__ == '__main__':
try:
rospy.init_node("waypoint_client_py")
result = waypoint_client()
print "Result:", result
except rospy.ROSInterruptException as e:
print "Client interrupted before completion!", str(e)
<commit_msg>[py_controller_client] Add a simple Python action client<commit_after>#! /usr/bin/env python
import rospy
import actionlib
from cpp_controller_msgs.msg import *
from geometry_msgs.msg import Pose2D
def waypoint_client(waypoints = []):
# Create the client, passing the type of the action to the constructor.
client = actionlib.SimpleActionClient("waypoint_following",
WaypointFollowingAction)
# Wait until the action server has started up.
client.wait_for_server()
# Create a goal to be sent to the action server.
action_goal = WaypointFollowingGoal()
# Fill out the request part of the message.
action_goal.waypoints = waypoints
# Send the goal to the action server.
client.send_goal(action_goal)
# Wait for the server to finish performing the action.
client.wait_for_result()
return client.get_state()
if __name__ == '__main__':
waypoints = list()
waypoints.append(Pose2D(x = 9.0, y = 1.0))
waypoints.append(Pose2D(x = 9.0, y = 9.0))
waypoints.append(Pose2D(x = 1.0, y = 9.0))
waypoints.append(Pose2D(x = 1.0, y = 1.0))
for wp in waypoints:
print wp
try:
rospy.init_node("waypoint_client_py")
result = waypoint_client(waypoints)
print "Result:", result, ("SUCCEEDED = 3")
except rospy.ROSInterruptException as e:
print "Client interrupted before completion!", str(e)
|
6c29585d1d47ff7cafc7f4fbc03abc977211e885 | jasylibrary.py | jasylibrary.py | # Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = os.path.relpath(os.path.join(profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
| # Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
if hasattr(profile, "getCssFolder"):
# Old jasy < 1.5-beta4
folder = profile.getCssFolder()
else:
# New jasy >= 1.5-beta4
folder = profile.getCssOutputFolder()
outputPath = os.path.relpath(os.path.join(profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
| Fix detecting css output folder in different jasy versions | Fix detecting css output folder in different jasy versions
| Python | mit | fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur | # Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = os.path.relpath(os.path.join(profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
Fix detecting css output folder in different jasy versions | # Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
if hasattr(profile, "getCssFolder"):
# Old jasy < 1.5-beta4
folder = profile.getCssFolder()
else:
# New jasy >= 1.5-beta4
folder = profile.getCssOutputFolder()
outputPath = os.path.relpath(os.path.join(profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
| <commit_before># Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = os.path.relpath(os.path.join(profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
<commit_msg>Fix detecting css output folder in different jasy versions<commit_after> | # Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
if hasattr(profile, "getCssFolder"):
# Old jasy < 1.5-beta4
folder = profile.getCssFolder()
else:
# New jasy >= 1.5-beta4
folder = profile.getCssOutputFolder()
outputPath = os.path.relpath(os.path.join(profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
| # Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = os.path.relpath(os.path.join(profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
Fix detecting css output folder in different jasy versions# Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
if hasattr(profile, "getCssFolder"):
# Old jasy < 1.5-beta4
folder = profile.getCssFolder()
else:
# New jasy >= 1.5-beta4
folder = profile.getCssOutputFolder()
outputPath = os.path.relpath(os.path.join(profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
| <commit_before># Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = os.path.relpath(os.path.join(profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
<commit_msg>Fix detecting css output folder in different jasy versions<commit_after># Little helper to allow python modules in current jasylibrarys path
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
if hasattr(profile, "getCssFolder"):
# Old jasy < 1.5-beta4
folder = profile.getCssFolder()
else:
# New jasy >= 1.5-beta4
folder = profile.getCssOutputFolder()
outputPath = os.path.relpath(os.path.join(profile.getDestinationPath(), folder), profile.getWorkingPath())
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
profile.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
|
0f68cbe43506db577e08a18f97cc8cba6f7367cf | combine/manifest.py | combine/manifest.py | # Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(action)
else:
mft.add_property(key, value)
for action in data["actions"]:
mft.add_action(action)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
| # Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(dict(action))
else:
mft.add_property(key, value)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
| Fix issues with loading from dict | Fix issues with loading from dict
| Python | mit | redmatter/combine | # Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(action)
else:
mft.add_property(key, value)
for action in data["actions"]:
mft.add_action(action)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
Fix issues with loading from dict | # Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(dict(action))
else:
mft.add_property(key, value)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
| <commit_before># Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(action)
else:
mft.add_property(key, value)
for action in data["actions"]:
mft.add_action(action)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
<commit_msg>Fix issues with loading from dict<commit_after> | # Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(dict(action))
else:
mft.add_property(key, value)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
| # Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(action)
else:
mft.add_property(key, value)
for action in data["actions"]:
mft.add_action(action)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
Fix issues with loading from dict# Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(dict(action))
else:
mft.add_property(key, value)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
| <commit_before># Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(action)
else:
mft.add_property(key, value)
for action in data["actions"]:
mft.add_action(action)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
<commit_msg>Fix issues with loading from dict<commit_after># Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(dict(action))
else:
mft.add_property(key, value)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
|
c7511d81236f2a28019d8d8e103b03e0d1150e32 | django_website/blog/admin.py | django_website/blog/admin.py | from __future__ import absolute_import
from django.contrib import admin
from .models import Entry
admin.site.register(Entry,
list_display = ('headline', 'pub_date', 'is_active', 'is_published', 'author'),
list_filter = ('is_active',),
exclude = ('summary_html', 'body_html'),
prepopulated_fields = {"slug": ("headline",)}
)
| from __future__ import absolute_import
from django.contrib import admin
from .models import Entry
class EntryAdmin(admin.ModelAdmin):
list_display = ('headline', 'pub_date', 'is_active', 'is_published', 'author')
list_filter = ('is_active',)
exclude = ('summary_html', 'body_html')
prepopulated_fields = {"slug": ("headline",)}
admin.site.register(Entry, EntryAdmin)
| Use proper ModelAdmin for blog entry | Use proper ModelAdmin for blog entry | Python | bsd-3-clause | khkaminska/djangoproject.com,nanuxbe/django,rmoorman/djangoproject.com,gnarf/djangoproject.com,relekang/djangoproject.com,relekang/djangoproject.com,django/djangoproject.com,alawnchen/djangoproject.com,vxvinh1511/djangoproject.com,nanuxbe/django,nanuxbe/django,django/djangoproject.com,xavierdutreilh/djangoproject.com,gnarf/djangoproject.com,django/djangoproject.com,relekang/djangoproject.com,alawnchen/djangoproject.com,khkaminska/djangoproject.com,alawnchen/djangoproject.com,django/djangoproject.com,rmoorman/djangoproject.com,relekang/djangoproject.com,hassanabidpk/djangoproject.com,rmoorman/djangoproject.com,rmoorman/djangoproject.com,xavierdutreilh/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,xavierdutreilh/djangoproject.com,django/djangoproject.com,hassanabidpk/djangoproject.com,vxvinh1511/djangoproject.com,gnarf/djangoproject.com,hassanabidpk/djangoproject.com,hassanabidpk/djangoproject.com,nanuxbe/django,vxvinh1511/djangoproject.com,gnarf/djangoproject.com,vxvinh1511/djangoproject.com,khkaminska/djangoproject.com,alawnchen/djangoproject.com | from __future__ import absolute_import
from django.contrib import admin
from .models import Entry
admin.site.register(Entry,
list_display = ('headline', 'pub_date', 'is_active', 'is_published', 'author'),
list_filter = ('is_active',),
exclude = ('summary_html', 'body_html'),
prepopulated_fields = {"slug": ("headline",)}
)
Use proper ModelAdmin for blog entry | from __future__ import absolute_import
from django.contrib import admin
from .models import Entry
class EntryAdmin(admin.ModelAdmin):
list_display = ('headline', 'pub_date', 'is_active', 'is_published', 'author')
list_filter = ('is_active',)
exclude = ('summary_html', 'body_html')
prepopulated_fields = {"slug": ("headline",)}
admin.site.register(Entry, EntryAdmin)
| <commit_before>from __future__ import absolute_import
from django.contrib import admin
from .models import Entry
admin.site.register(Entry,
list_display = ('headline', 'pub_date', 'is_active', 'is_published', 'author'),
list_filter = ('is_active',),
exclude = ('summary_html', 'body_html'),
prepopulated_fields = {"slug": ("headline",)}
)
<commit_msg>Use proper ModelAdmin for blog entry<commit_after> | from __future__ import absolute_import
from django.contrib import admin
from .models import Entry
class EntryAdmin(admin.ModelAdmin):
list_display = ('headline', 'pub_date', 'is_active', 'is_published', 'author')
list_filter = ('is_active',)
exclude = ('summary_html', 'body_html')
prepopulated_fields = {"slug": ("headline",)}
admin.site.register(Entry, EntryAdmin)
| from __future__ import absolute_import
from django.contrib import admin
from .models import Entry
admin.site.register(Entry,
list_display = ('headline', 'pub_date', 'is_active', 'is_published', 'author'),
list_filter = ('is_active',),
exclude = ('summary_html', 'body_html'),
prepopulated_fields = {"slug": ("headline",)}
)
Use proper ModelAdmin for blog entryfrom __future__ import absolute_import
from django.contrib import admin
from .models import Entry
class EntryAdmin(admin.ModelAdmin):
list_display = ('headline', 'pub_date', 'is_active', 'is_published', 'author')
list_filter = ('is_active',)
exclude = ('summary_html', 'body_html')
prepopulated_fields = {"slug": ("headline",)}
admin.site.register(Entry, EntryAdmin)
| <commit_before>from __future__ import absolute_import
from django.contrib import admin
from .models import Entry
admin.site.register(Entry,
list_display = ('headline', 'pub_date', 'is_active', 'is_published', 'author'),
list_filter = ('is_active',),
exclude = ('summary_html', 'body_html'),
prepopulated_fields = {"slug": ("headline",)}
)
<commit_msg>Use proper ModelAdmin for blog entry<commit_after>from __future__ import absolute_import
from django.contrib import admin
from .models import Entry
class EntryAdmin(admin.ModelAdmin):
list_display = ('headline', 'pub_date', 'is_active', 'is_published', 'author')
list_filter = ('is_active',)
exclude = ('summary_html', 'body_html')
prepopulated_fields = {"slug": ("headline",)}
admin.site.register(Entry, EntryAdmin)
|
3df68935d0c93135f6cf1749a6d730e3914156e1 | mint/lib/proxiedtransport.py | mint/lib/proxiedtransport.py | #
# Copyright (c) 2005-2009 rPath, Inc.
#
# All rights reserved
#
import urllib
from conary.repository import transport
class ProxiedTransport(transport.Transport):
"""
Transport class for contacting rUS through a proxy
"""
def __init__(self, *args, **kw):
# Override transport.XMLOpener with our own that does the right thing
# with the selector.
transport.XMLOpener = ProxiedXMLOpener
return transport.Transport.__init__(self, *args, **kw)
def parse_response(self, *args, **kw):
resp = transport.Transport.parse_response(self, *args, **kw)
# The request method on transport.Transport expects this return
# result.
return [[resp,]]
def request(self, *args, **kw):
resp = transport.Transport.request(self, *args, **kw)
# Return just the value.
return resp[0][1][0]
class ProxiedXMLOpener(transport.XMLOpener):
def createConnection(self, *args, **kw):
h, urlstr, selector, headers = transport.URLOpener.createConnection(self, *args, **kw)
# transport.URLOpener.createConnection leaves selector as the full
# protocol, host, path string. That does not always work with proxy,
# so parse out just the path.
proto, rest = urllib.splittype(selector)
host, rest = urllib.splithost(rest)
return h, urlstr, rest, headers
| #
# Copyright (c) 2005-2009 rPath, Inc.
#
# All rights reserved
#
import urllib
from conary.repository import transport
class ProxiedTransport(transport.Transport):
"""
Transport class for contacting rUS through a proxy
"""
def __init__(self, *args, **kw):
# Override transport.XMLOpener with our own that does the right thing
# with the selector.
transport.XMLOpener = ProxiedXMLOpener
return transport.Transport.__init__(self, *args, **kw)
def parse_response(self, *args, **kw):
resp = transport.Transport.parse_response(self, *args, **kw)
# The request method on transport.Transport expects this return
# result.
return [[resp,]]
def request(self, *args, **kw):
resp = transport.Transport.request(self, *args, **kw)
# Return just the value.
return resp[0][1]
class ProxiedXMLOpener(transport.XMLOpener):
def createConnection(self, *args, **kw):
h, urlstr, selector, headers = transport.URLOpener.createConnection(self, *args, **kw)
# transport.URLOpener.createConnection leaves selector as the full
# protocol, host, path string. That does not always work with proxy,
# so parse out just the path.
proto, rest = urllib.splittype(selector)
host, rest = urllib.splithost(rest)
return h, urlstr, rest, headers
| Fix latent bug in proxied XMLRPC that broke adding 5.8.x rUS (RBL-7945) | Fix latent bug in proxied XMLRPC that broke adding 5.8.x rUS (RBL-7945)
| Python | apache-2.0 | sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint | #
# Copyright (c) 2005-2009 rPath, Inc.
#
# All rights reserved
#
import urllib
from conary.repository import transport
class ProxiedTransport(transport.Transport):
"""
Transport class for contacting rUS through a proxy
"""
def __init__(self, *args, **kw):
# Override transport.XMLOpener with our own that does the right thing
# with the selector.
transport.XMLOpener = ProxiedXMLOpener
return transport.Transport.__init__(self, *args, **kw)
def parse_response(self, *args, **kw):
resp = transport.Transport.parse_response(self, *args, **kw)
# The request method on transport.Transport expects this return
# result.
return [[resp,]]
def request(self, *args, **kw):
resp = transport.Transport.request(self, *args, **kw)
# Return just the value.
return resp[0][1][0]
class ProxiedXMLOpener(transport.XMLOpener):
def createConnection(self, *args, **kw):
h, urlstr, selector, headers = transport.URLOpener.createConnection(self, *args, **kw)
# transport.URLOpener.createConnection leaves selector as the full
# protocol, host, path string. That does not always work with proxy,
# so parse out just the path.
proto, rest = urllib.splittype(selector)
host, rest = urllib.splithost(rest)
return h, urlstr, rest, headers
Fix latent bug in proxied XMLRPC that broke adding 5.8.x rUS (RBL-7945) | #
# Copyright (c) 2005-2009 rPath, Inc.
#
# All rights reserved
#
import urllib
from conary.repository import transport
class ProxiedTransport(transport.Transport):
"""
Transport class for contacting rUS through a proxy
"""
def __init__(self, *args, **kw):
# Override transport.XMLOpener with our own that does the right thing
# with the selector.
transport.XMLOpener = ProxiedXMLOpener
return transport.Transport.__init__(self, *args, **kw)
def parse_response(self, *args, **kw):
resp = transport.Transport.parse_response(self, *args, **kw)
# The request method on transport.Transport expects this return
# result.
return [[resp,]]
def request(self, *args, **kw):
resp = transport.Transport.request(self, *args, **kw)
# Return just the value.
return resp[0][1]
class ProxiedXMLOpener(transport.XMLOpener):
def createConnection(self, *args, **kw):
h, urlstr, selector, headers = transport.URLOpener.createConnection(self, *args, **kw)
# transport.URLOpener.createConnection leaves selector as the full
# protocol, host, path string. That does not always work with proxy,
# so parse out just the path.
proto, rest = urllib.splittype(selector)
host, rest = urllib.splithost(rest)
return h, urlstr, rest, headers
| <commit_before>#
# Copyright (c) 2005-2009 rPath, Inc.
#
# All rights reserved
#
import urllib
from conary.repository import transport
class ProxiedTransport(transport.Transport):
"""
Transport class for contacting rUS through a proxy
"""
def __init__(self, *args, **kw):
# Override transport.XMLOpener with our own that does the right thing
# with the selector.
transport.XMLOpener = ProxiedXMLOpener
return transport.Transport.__init__(self, *args, **kw)
def parse_response(self, *args, **kw):
resp = transport.Transport.parse_response(self, *args, **kw)
# The request method on transport.Transport expects this return
# result.
return [[resp,]]
def request(self, *args, **kw):
resp = transport.Transport.request(self, *args, **kw)
# Return just the value.
return resp[0][1][0]
class ProxiedXMLOpener(transport.XMLOpener):
def createConnection(self, *args, **kw):
h, urlstr, selector, headers = transport.URLOpener.createConnection(self, *args, **kw)
# transport.URLOpener.createConnection leaves selector as the full
# protocol, host, path string. That does not always work with proxy,
# so parse out just the path.
proto, rest = urllib.splittype(selector)
host, rest = urllib.splithost(rest)
return h, urlstr, rest, headers
<commit_msg>Fix latent bug in proxied XMLRPC that broke adding 5.8.x rUS (RBL-7945)<commit_after> | #
# Copyright (c) 2005-2009 rPath, Inc.
#
# All rights reserved
#
import urllib
from conary.repository import transport
class ProxiedTransport(transport.Transport):
"""
Transport class for contacting rUS through a proxy
"""
def __init__(self, *args, **kw):
# Override transport.XMLOpener with our own that does the right thing
# with the selector.
transport.XMLOpener = ProxiedXMLOpener
return transport.Transport.__init__(self, *args, **kw)
def parse_response(self, *args, **kw):
resp = transport.Transport.parse_response(self, *args, **kw)
# The request method on transport.Transport expects this return
# result.
return [[resp,]]
def request(self, *args, **kw):
resp = transport.Transport.request(self, *args, **kw)
# Return just the value.
return resp[0][1]
class ProxiedXMLOpener(transport.XMLOpener):
def createConnection(self, *args, **kw):
h, urlstr, selector, headers = transport.URLOpener.createConnection(self, *args, **kw)
# transport.URLOpener.createConnection leaves selector as the full
# protocol, host, path string. That does not always work with proxy,
# so parse out just the path.
proto, rest = urllib.splittype(selector)
host, rest = urllib.splithost(rest)
return h, urlstr, rest, headers
| #
# Copyright (c) 2005-2009 rPath, Inc.
#
# All rights reserved
#
import urllib
from conary.repository import transport
class ProxiedTransport(transport.Transport):
"""
Transport class for contacting rUS through a proxy
"""
def __init__(self, *args, **kw):
# Override transport.XMLOpener with our own that does the right thing
# with the selector.
transport.XMLOpener = ProxiedXMLOpener
return transport.Transport.__init__(self, *args, **kw)
def parse_response(self, *args, **kw):
resp = transport.Transport.parse_response(self, *args, **kw)
# The request method on transport.Transport expects this return
# result.
return [[resp,]]
def request(self, *args, **kw):
resp = transport.Transport.request(self, *args, **kw)
# Return just the value.
return resp[0][1][0]
class ProxiedXMLOpener(transport.XMLOpener):
def createConnection(self, *args, **kw):
h, urlstr, selector, headers = transport.URLOpener.createConnection(self, *args, **kw)
# transport.URLOpener.createConnection leaves selector as the full
# protocol, host, path string. That does not always work with proxy,
# so parse out just the path.
proto, rest = urllib.splittype(selector)
host, rest = urllib.splithost(rest)
return h, urlstr, rest, headers
Fix latent bug in proxied XMLRPC that broke adding 5.8.x rUS (RBL-7945)#
# Copyright (c) 2005-2009 rPath, Inc.
#
# All rights reserved
#
import urllib
from conary.repository import transport
class ProxiedTransport(transport.Transport):
"""
Transport class for contacting rUS through a proxy
"""
def __init__(self, *args, **kw):
# Override transport.XMLOpener with our own that does the right thing
# with the selector.
transport.XMLOpener = ProxiedXMLOpener
return transport.Transport.__init__(self, *args, **kw)
def parse_response(self, *args, **kw):
resp = transport.Transport.parse_response(self, *args, **kw)
# The request method on transport.Transport expects this return
# result.
return [[resp,]]
def request(self, *args, **kw):
resp = transport.Transport.request(self, *args, **kw)
# Return just the value.
return resp[0][1]
class ProxiedXMLOpener(transport.XMLOpener):
def createConnection(self, *args, **kw):
h, urlstr, selector, headers = transport.URLOpener.createConnection(self, *args, **kw)
# transport.URLOpener.createConnection leaves selector as the full
# protocol, host, path string. That does not always work with proxy,
# so parse out just the path.
proto, rest = urllib.splittype(selector)
host, rest = urllib.splithost(rest)
return h, urlstr, rest, headers
| <commit_before>#
# Copyright (c) 2005-2009 rPath, Inc.
#
# All rights reserved
#
import urllib
from conary.repository import transport
class ProxiedTransport(transport.Transport):
"""
Transport class for contacting rUS through a proxy
"""
def __init__(self, *args, **kw):
# Override transport.XMLOpener with our own that does the right thing
# with the selector.
transport.XMLOpener = ProxiedXMLOpener
return transport.Transport.__init__(self, *args, **kw)
def parse_response(self, *args, **kw):
resp = transport.Transport.parse_response(self, *args, **kw)
# The request method on transport.Transport expects this return
# result.
return [[resp,]]
def request(self, *args, **kw):
resp = transport.Transport.request(self, *args, **kw)
# Return just the value.
return resp[0][1][0]
class ProxiedXMLOpener(transport.XMLOpener):
def createConnection(self, *args, **kw):
h, urlstr, selector, headers = transport.URLOpener.createConnection(self, *args, **kw)
# transport.URLOpener.createConnection leaves selector as the full
# protocol, host, path string. That does not always work with proxy,
# so parse out just the path.
proto, rest = urllib.splittype(selector)
host, rest = urllib.splithost(rest)
return h, urlstr, rest, headers
<commit_msg>Fix latent bug in proxied XMLRPC that broke adding 5.8.x rUS (RBL-7945)<commit_after>#
# Copyright (c) 2005-2009 rPath, Inc.
#
# All rights reserved
#
import urllib
from conary.repository import transport
class ProxiedTransport(transport.Transport):
"""
Transport class for contacting rUS through a proxy
"""
def __init__(self, *args, **kw):
# Override transport.XMLOpener with our own that does the right thing
# with the selector.
transport.XMLOpener = ProxiedXMLOpener
return transport.Transport.__init__(self, *args, **kw)
def parse_response(self, *args, **kw):
resp = transport.Transport.parse_response(self, *args, **kw)
# The request method on transport.Transport expects this return
# result.
return [[resp,]]
def request(self, *args, **kw):
resp = transport.Transport.request(self, *args, **kw)
# Return just the value.
return resp[0][1]
class ProxiedXMLOpener(transport.XMLOpener):
def createConnection(self, *args, **kw):
h, urlstr, selector, headers = transport.URLOpener.createConnection(self, *args, **kw)
# transport.URLOpener.createConnection leaves selector as the full
# protocol, host, path string. That does not always work with proxy,
# so parse out just the path.
proto, rest = urllib.splittype(selector)
host, rest = urllib.splithost(rest)
return h, urlstr, rest, headers
|
d6c493df4df06f5195c1f964224728ca4e5ace06 | django_project/realtime/management/commands/loadfloodtestdata.py | django_project/realtime/management/commands/loadfloodtestdata.py | # coding=utf-8
import os
import shutil
from tempfile import mkdtemp
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
# Copy file to hazard drop directory
REALTIME_HAZARD_DROP = os.environ.get(
'REALTIME_HAZARD_DROP',
'/home/realtime/hazard-drop/')
hazard_drop_path = mkdtemp(dir=REALTIME_HAZARD_DROP)
hazard_drop_path = os.path.join(
hazard_drop_path, os.path.basename(flood_layer_uri))
print 'Copy flood data to %s' % hazard_drop_path
shutil.copy(flood_layer_uri, hazard_drop_path)
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': flood_layer_uri
}
)
| # coding=utf-8
import os
import shutil
from tempfile import mkdtemp
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
# Copy file to hazard drop directory
REALTIME_HAZARD_DROP = os.environ.get(
'REALTIME_HAZARD_DROP',
'/home/realtime/hazard-drop/')
hazard_drop_path = mkdtemp(dir=REALTIME_HAZARD_DROP)
hazard_drop_path = os.path.join(
hazard_drop_path, os.path.basename(flood_layer_uri))
print 'Copy flood data to %s' % hazard_drop_path
shutil.copy(flood_layer_uri, hazard_drop_path)
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': hazard_drop_path
}
)
| Fix wrong path to flood data to push. | Fix wrong path to flood data to push.
| Python | bsd-2-clause | AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django | # coding=utf-8
import os
import shutil
from tempfile import mkdtemp
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
# Copy file to hazard drop directory
REALTIME_HAZARD_DROP = os.environ.get(
'REALTIME_HAZARD_DROP',
'/home/realtime/hazard-drop/')
hazard_drop_path = mkdtemp(dir=REALTIME_HAZARD_DROP)
hazard_drop_path = os.path.join(
hazard_drop_path, os.path.basename(flood_layer_uri))
print 'Copy flood data to %s' % hazard_drop_path
shutil.copy(flood_layer_uri, hazard_drop_path)
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': flood_layer_uri
}
)
Fix wrong path to flood data to push. | # coding=utf-8
import os
import shutil
from tempfile import mkdtemp
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
# Copy file to hazard drop directory
REALTIME_HAZARD_DROP = os.environ.get(
'REALTIME_HAZARD_DROP',
'/home/realtime/hazard-drop/')
hazard_drop_path = mkdtemp(dir=REALTIME_HAZARD_DROP)
hazard_drop_path = os.path.join(
hazard_drop_path, os.path.basename(flood_layer_uri))
print 'Copy flood data to %s' % hazard_drop_path
shutil.copy(flood_layer_uri, hazard_drop_path)
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': hazard_drop_path
}
)
| <commit_before># coding=utf-8
import os
import shutil
from tempfile import mkdtemp
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
# Copy file to hazard drop directory
REALTIME_HAZARD_DROP = os.environ.get(
'REALTIME_HAZARD_DROP',
'/home/realtime/hazard-drop/')
hazard_drop_path = mkdtemp(dir=REALTIME_HAZARD_DROP)
hazard_drop_path = os.path.join(
hazard_drop_path, os.path.basename(flood_layer_uri))
print 'Copy flood data to %s' % hazard_drop_path
shutil.copy(flood_layer_uri, hazard_drop_path)
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': flood_layer_uri
}
)
<commit_msg>Fix wrong path to flood data to push.<commit_after> | # coding=utf-8
import os
import shutil
from tempfile import mkdtemp
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
# Copy file to hazard drop directory
REALTIME_HAZARD_DROP = os.environ.get(
'REALTIME_HAZARD_DROP',
'/home/realtime/hazard-drop/')
hazard_drop_path = mkdtemp(dir=REALTIME_HAZARD_DROP)
hazard_drop_path = os.path.join(
hazard_drop_path, os.path.basename(flood_layer_uri))
print 'Copy flood data to %s' % hazard_drop_path
shutil.copy(flood_layer_uri, hazard_drop_path)
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': hazard_drop_path
}
)
| # coding=utf-8
import os
import shutil
from tempfile import mkdtemp
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
# Copy file to hazard drop directory
REALTIME_HAZARD_DROP = os.environ.get(
'REALTIME_HAZARD_DROP',
'/home/realtime/hazard-drop/')
hazard_drop_path = mkdtemp(dir=REALTIME_HAZARD_DROP)
hazard_drop_path = os.path.join(
hazard_drop_path, os.path.basename(flood_layer_uri))
print 'Copy flood data to %s' % hazard_drop_path
shutil.copy(flood_layer_uri, hazard_drop_path)
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': flood_layer_uri
}
)
Fix wrong path to flood data to push.# coding=utf-8
import os
import shutil
from tempfile import mkdtemp
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
# Copy file to hazard drop directory
REALTIME_HAZARD_DROP = os.environ.get(
'REALTIME_HAZARD_DROP',
'/home/realtime/hazard-drop/')
hazard_drop_path = mkdtemp(dir=REALTIME_HAZARD_DROP)
hazard_drop_path = os.path.join(
hazard_drop_path, os.path.basename(flood_layer_uri))
print 'Copy flood data to %s' % hazard_drop_path
shutil.copy(flood_layer_uri, hazard_drop_path)
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': hazard_drop_path
}
)
| <commit_before># coding=utf-8
import os
import shutil
from tempfile import mkdtemp
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
# Copy file to hazard drop directory
REALTIME_HAZARD_DROP = os.environ.get(
'REALTIME_HAZARD_DROP',
'/home/realtime/hazard-drop/')
hazard_drop_path = mkdtemp(dir=REALTIME_HAZARD_DROP)
hazard_drop_path = os.path.join(
hazard_drop_path, os.path.basename(flood_layer_uri))
print 'Copy flood data to %s' % hazard_drop_path
shutil.copy(flood_layer_uri, hazard_drop_path)
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': flood_layer_uri
}
)
<commit_msg>Fix wrong path to flood data to push.<commit_after># coding=utf-8
import os
import shutil
from tempfile import mkdtemp
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
# Copy file to hazard drop directory
REALTIME_HAZARD_DROP = os.environ.get(
'REALTIME_HAZARD_DROP',
'/home/realtime/hazard-drop/')
hazard_drop_path = mkdtemp(dir=REALTIME_HAZARD_DROP)
hazard_drop_path = os.path.join(
hazard_drop_path, os.path.basename(flood_layer_uri))
print 'Copy flood data to %s' % hazard_drop_path
shutil.copy(flood_layer_uri, hazard_drop_path)
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': hazard_drop_path
}
)
|
5709a160e6aad62bcdd8ae35c1b8bf9e8a6f7b6c | wagtail/contrib/wagtailfrontendcache/signal_handlers.py | wagtail/contrib/wagtailfrontendcache/signal_handlers.py | from django.db import models
from django.db.models.signals import post_save, post_delete
from wagtail.wagtailcore.models import Page
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def post_save_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
post_save.connect(post_save_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
| from django.db import models
from django.db.models.signals import post_delete
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.signals import page_published
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def page_published_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
page_published.connect(page_published_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
| Use page_published signal for cache invalidation instead of post_save | Use page_published signal for cache invalidation instead of post_save
| Python | bsd-3-clause | nrsimha/wagtail,nrsimha/wagtail,kurtrwall/wagtail,quru/wagtail,tangentlabs/wagtail,nutztherookie/wagtail,hanpama/wagtail,kaedroho/wagtail,Klaudit/wagtail,marctc/wagtail,mixxorz/wagtail,stevenewey/wagtail,taedori81/wagtail,willcodefortea/wagtail,Klaudit/wagtail,rsalmaso/wagtail,serzans/wagtail,nealtodd/wagtail,Tivix/wagtail,Toshakins/wagtail,jorge-marques/wagtail,hamsterbacke23/wagtail,rjsproxy/wagtail,taedori81/wagtail,thenewguy/wagtail,marctc/wagtail,Toshakins/wagtail,nimasmi/wagtail,Klaudit/wagtail,rsalmaso/wagtail,mephizzle/wagtail,nilnvoid/wagtail,mjec/wagtail,timorieber/wagtail,lojack/wagtail,wagtail/wagtail,taedori81/wagtail,mikedingjan/wagtail,bjesus/wagtail,mayapurmedia/wagtail,nilnvoid/wagtail,gogobook/wagtail,takeshineshiro/wagtail,bjesus/wagtail,darith27/wagtail,inonit/wagtail,stevenewey/wagtail,mephizzle/wagtail,Pennebaker/wagtail,timorieber/wagtail,iho/wagtail,nilnvoid/wagtail,KimGlazebrook/wagtail-experiment,kaedroho/wagtail,takeflight/wagtail,mikedingjan/wagtail,nrsimha/wagtail,JoshBarr/wagtail,gogobook/wagtail,thenewguy/wagtail,zerolab/wagtail,nrsimha/wagtail,gogobook/wagtail,chimeno/wagtail,bjesus/wagtail,nimasmi/wagtail,jordij/wagtail,takeshineshiro/wagtail,quru/wagtail,iansprice/wagtail,takeshineshiro/wagtail,jnns/wagtail,jnns/wagtail,takeshineshiro/wagtail,mikedingjan/wagtail,mayapurmedia/wagtail,bjesus/wagtail,timorieber/wagtail,nealtodd/wagtail,hanpama/wagtail,benemery/wagtail,rjsproxy/wagtail,iansprice/wagtail,mayapurmedia/wagtail,mephizzle/wagtail,janusnic/wagtail,inonit/wagtail,davecranwell/wagtail,FlipperPA/wagtail,lojack/wagtail,serzans/wagtail,chimeno/wagtail,nealtodd/wagtail,kurtrwall/wagtail,zerolab/wagtail,janusnic/wagtail,WQuanfeng/wagtail,zerolab/wagtail,JoshBarr/wagtail,nimasmi/wagtail,Toshakins/wagtail,zerolab/wagtail,KimGlazebrook/wagtail-experiment,davecranwell/wagtail,FlipperPA/wagtail,kurtrwall/wagtail,gasman/wagtail,rjsproxy/wagtail,JoshBarr/wagtail,nutztherookie/wagtail,dresiu/wagtail,rv816/wagtail,T
oshakins/wagtail,takeflight/wagtail,rv816/wagtail,dresiu/wagtail,Tivix/wagtail,mjec/wagtail,jnns/wagtail,chrxr/wagtail,thenewguy/wagtail,chimeno/wagtail,willcodefortea/wagtail,Tivix/wagtail,tangentlabs/wagtail,darith27/wagtail,thenewguy/wagtail,Pennebaker/wagtail,willcodefortea/wagtail,stevenewey/wagtail,taedori81/wagtail,timorieber/wagtail,chrxr/wagtail,jordij/wagtail,torchbox/wagtail,gasman/wagtail,kurtw/wagtail,rjsproxy/wagtail,jorge-marques/wagtail,dresiu/wagtail,kurtrwall/wagtail,torchbox/wagtail,rv816/wagtail,mjec/wagtail,dresiu/wagtail,rsalmaso/wagtail,thenewguy/wagtail,iho/wagtail,quru/wagtail,FlipperPA/wagtail,KimGlazebrook/wagtail-experiment,jordij/wagtail,torchbox/wagtail,serzans/wagtail,FlipperPA/wagtail,gasman/wagtail,rsalmaso/wagtail,mixxorz/wagtail,iho/wagtail,rv816/wagtail,Tivix/wagtail,100Shapes/wagtail,m-sanders/wagtail,chrxr/wagtail,tangentlabs/wagtail,chrxr/wagtail,m-sanders/wagtail,nilnvoid/wagtail,dresiu/wagtail,kaedroho/wagtail,benemery/wagtail,zerolab/wagtail,kaedroho/wagtail,mixxorz/wagtail,marctc/wagtail,lojack/wagtail,gasman/wagtail,iansprice/wagtail,hanpama/wagtail,mayapurmedia/wagtail,Pennebaker/wagtail,wagtail/wagtail,benemery/wagtail,JoshBarr/wagtail,mixxorz/wagtail,quru/wagtail,tangentlabs/wagtail,inonit/wagtail,wagtail/wagtail,WQuanfeng/wagtail,gogobook/wagtail,Pennebaker/wagtail,wagtail/wagtail,davecranwell/wagtail,iansprice/wagtail,kaedroho/wagtail,davecranwell/wagtail,torchbox/wagtail,kurtw/wagtail,chimeno/wagtail,rsalmaso/wagtail,m-sanders/wagtail,takeflight/wagtail,100Shapes/wagtail,janusnic/wagtail,takeflight/wagtail,janusnic/wagtail,jorge-marques/wagtail,benjaoming/wagtail,taedori81/wagtail,kurtw/wagtail,100Shapes/wagtail,kurtw/wagtail,m-sanders/wagtail,hanpama/wagtail,hamsterbacke23/wagtail,inonit/wagtail,jorge-marques/wagtail,WQuanfeng/wagtail,hamsterbacke23/wagtail,serzans/wagtail,willcodefortea/wagtail,benjaoming/wagtail,hamsterbacke23/wagtail,benjaoming/wagtail,marctc/wagtail,stevenewey/wagtail,iho/wagtail,gasman/wagtail,
nealtodd/wagtail,jnns/wagtail,darith27/wagtail,chimeno/wagtail,KimGlazebrook/wagtail-experiment,benjaoming/wagtail,jorge-marques/wagtail,nutztherookie/wagtail,jordij/wagtail,mixxorz/wagtail,nutztherookie/wagtail,WQuanfeng/wagtail,benemery/wagtail,mjec/wagtail,darith27/wagtail,mikedingjan/wagtail,mephizzle/wagtail,wagtail/wagtail,nimasmi/wagtail,Klaudit/wagtail | from django.db import models
from django.db.models.signals import post_save, post_delete
from wagtail.wagtailcore.models import Page
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def post_save_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
post_save.connect(post_save_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
Use page_published signal for cache invalidation instead of post_save | from django.db import models
from django.db.models.signals import post_delete
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.signals import page_published
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def page_published_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
page_published.connect(page_published_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
| <commit_before>from django.db import models
from django.db.models.signals import post_save, post_delete
from wagtail.wagtailcore.models import Page
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def post_save_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
post_save.connect(post_save_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
<commit_msg>Use page_published signal for cache invalidation instead of post_save<commit_after> | from django.db import models
from django.db.models.signals import post_delete
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.signals import page_published
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def page_published_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
page_published.connect(page_published_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
| from django.db import models
from django.db.models.signals import post_save, post_delete
from wagtail.wagtailcore.models import Page
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def post_save_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
post_save.connect(post_save_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
Use page_published signal for cache invalidation instead of post_savefrom django.db import models
from django.db.models.signals import post_delete
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.signals import page_published
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def page_published_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
page_published.connect(page_published_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
| <commit_before>from django.db import models
from django.db.models.signals import post_save, post_delete
from wagtail.wagtailcore.models import Page
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def post_save_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
post_save.connect(post_save_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
<commit_msg>Use page_published signal for cache invalidation instead of post_save<commit_after>from django.db import models
from django.db.models.signals import post_delete
from wagtail.wagtailcore.models import Page
from wagtail.wagtailcore.signals import page_published
from wagtail.contrib.wagtailfrontendcache.utils import purge_page_from_cache
def page_published_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def post_delete_signal_handler(instance, **kwargs):
purge_page_from_cache(instance)
def register_signal_handlers():
# Get list of models that are page types
indexed_models = [model for model in models.get_models() if issubclass(model, Page)]
# Loop through list and register signal handlers for each one
for model in indexed_models:
page_published.connect(page_published_signal_handler, sender=model)
post_delete.connect(post_delete_signal_handler, sender=model)
|
9e513650f166e3ec85363da61ea1d36b601f0b72 | throughput/experiment.py | throughput/experiment.py | from time import time, sleep
from subprocess import call
from argparse import ArgumentParser
from atexit import register
from pyadtn.aDTN import aDTN
from pyadtn.utils import info, debug
EXPERIMENT_DURATION = 5 * 60 + 10 # 5 minutes and 5 seconds (in seconds)
IFACE = "wlan0"
FREQ = str(2432) # 802.11 channel 1
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument('device_id', type=str, help='the hostname of this device')
parser.add_argument('sending_freq', type=int, help='time between sending of a batch (in seconds)')
parser.add_argument('batch_size', type=int, help='batch size (in number of packets)')
args = parser.parse_args()
device_id = args.device_id
sf = args.sending_freq
bs = args.batch_size
call(("./network-setup.sh", IFACE))
call(["iw", IFACE, "ibss", "join", "test", FREQ])
# Inform about current config.
experiment_id = "throughput_" + "_".join(
[str(i) for i in ["bs", bs, "sf", sf, "cr"]])
info("\nNow running: {}".format(experiment_id))
# Start aDTN
adtn = aDTN(bs, sf, IFACE, experiment_id)
adtn.start()
sleep(EXPERIMENT_DURATION)
adtn.stop()
| from time import time, sleep
from subprocess import call
from argparse import ArgumentParser
from atexit import register
from pyadtn.aDTN import aDTN
from pyadtn.utils import info, debug
EXPERIMENT_DURATION = 5 * 60 + 10 # 5 minutes and 5 seconds (in seconds)
IFACE = "wlan0"
FREQ = str(2432) # 802.11 channel 5
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument('device_id', type=str, help='the hostname of this device')
parser.add_argument('sending_freq', type=int, help='time between sending of a batch (in seconds)')
parser.add_argument('batch_size', type=int, help='batch size (in number of packets)')
args = parser.parse_args()
device_id = args.device_id
sf = args.sending_freq
bs = args.batch_size
call(("./network-setup.sh", IFACE))
call(["iw", IFACE, "ibss", "join", "test", FREQ])
# Inform about current config.
experiment_id = "throughput_" + "_".join(
[str(i) for i in ["bs", bs, "sf", sf, "cr"]])
info("\nNow running: {}".format(experiment_id))
# Start aDTN
adtn = aDTN(bs, sf, IFACE, experiment_id)
adtn.start()
sleep(EXPERIMENT_DURATION)
adtn.stop()
| Fix channel number in the comment | Fix channel number in the comment | Python | agpl-3.0 | megfault/aDTN-python-experiment,megfault/aDTN-python-experiment | from time import time, sleep
from subprocess import call
from argparse import ArgumentParser
from atexit import register
from pyadtn.aDTN import aDTN
from pyadtn.utils import info, debug
EXPERIMENT_DURATION = 5 * 60 + 10 # 5 minutes and 5 seconds (in seconds)
IFACE = "wlan0"
FREQ = str(2432) # 802.11 channel 1
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument('device_id', type=str, help='the hostname of this device')
parser.add_argument('sending_freq', type=int, help='time between sending of a batch (in seconds)')
parser.add_argument('batch_size', type=int, help='batch size (in number of packets)')
args = parser.parse_args()
device_id = args.device_id
sf = args.sending_freq
bs = args.batch_size
call(("./network-setup.sh", IFACE))
call(["iw", IFACE, "ibss", "join", "test", FREQ])
# Inform about current config.
experiment_id = "throughput_" + "_".join(
[str(i) for i in ["bs", bs, "sf", sf, "cr"]])
info("\nNow running: {}".format(experiment_id))
# Start aDTN
adtn = aDTN(bs, sf, IFACE, experiment_id)
adtn.start()
sleep(EXPERIMENT_DURATION)
adtn.stop()
Fix channel number in the comment | from time import time, sleep
from subprocess import call
from argparse import ArgumentParser
from atexit import register
from pyadtn.aDTN import aDTN
from pyadtn.utils import info, debug
EXPERIMENT_DURATION = 5 * 60 + 10 # 5 minutes and 5 seconds (in seconds)
IFACE = "wlan0"
FREQ = str(2432) # 802.11 channel 5
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument('device_id', type=str, help='the hostname of this device')
parser.add_argument('sending_freq', type=int, help='time between sending of a batch (in seconds)')
parser.add_argument('batch_size', type=int, help='batch size (in number of packets)')
args = parser.parse_args()
device_id = args.device_id
sf = args.sending_freq
bs = args.batch_size
call(("./network-setup.sh", IFACE))
call(["iw", IFACE, "ibss", "join", "test", FREQ])
# Inform about current config.
experiment_id = "throughput_" + "_".join(
[str(i) for i in ["bs", bs, "sf", sf, "cr"]])
info("\nNow running: {}".format(experiment_id))
# Start aDTN
adtn = aDTN(bs, sf, IFACE, experiment_id)
adtn.start()
sleep(EXPERIMENT_DURATION)
adtn.stop()
| <commit_before>from time import time, sleep
from subprocess import call
from argparse import ArgumentParser
from atexit import register
from pyadtn.aDTN import aDTN
from pyadtn.utils import info, debug
EXPERIMENT_DURATION = 5 * 60 + 10 # 5 minutes and 5 seconds (in seconds)
IFACE = "wlan0"
FREQ = str(2432) # 802.11 channel 1
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument('device_id', type=str, help='the hostname of this device')
parser.add_argument('sending_freq', type=int, help='time between sending of a batch (in seconds)')
parser.add_argument('batch_size', type=int, help='batch size (in number of packets)')
args = parser.parse_args()
device_id = args.device_id
sf = args.sending_freq
bs = args.batch_size
call(("./network-setup.sh", IFACE))
call(["iw", IFACE, "ibss", "join", "test", FREQ])
# Inform about current config.
experiment_id = "throughput_" + "_".join(
[str(i) for i in ["bs", bs, "sf", sf, "cr"]])
info("\nNow running: {}".format(experiment_id))
# Start aDTN
adtn = aDTN(bs, sf, IFACE, experiment_id)
adtn.start()
sleep(EXPERIMENT_DURATION)
adtn.stop()
<commit_msg>Fix channel number in the comment<commit_after> | from time import time, sleep
from subprocess import call
from argparse import ArgumentParser
from atexit import register
from pyadtn.aDTN import aDTN
from pyadtn.utils import info, debug
EXPERIMENT_DURATION = 5 * 60 + 10 # 5 minutes and 5 seconds (in seconds)
IFACE = "wlan0"
FREQ = str(2432) # 802.11 channel 5
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument('device_id', type=str, help='the hostname of this device')
parser.add_argument('sending_freq', type=int, help='time between sending of a batch (in seconds)')
parser.add_argument('batch_size', type=int, help='batch size (in number of packets)')
args = parser.parse_args()
device_id = args.device_id
sf = args.sending_freq
bs = args.batch_size
call(("./network-setup.sh", IFACE))
call(["iw", IFACE, "ibss", "join", "test", FREQ])
# Inform about current config.
experiment_id = "throughput_" + "_".join(
[str(i) for i in ["bs", bs, "sf", sf, "cr"]])
info("\nNow running: {}".format(experiment_id))
# Start aDTN
adtn = aDTN(bs, sf, IFACE, experiment_id)
adtn.start()
sleep(EXPERIMENT_DURATION)
adtn.stop()
| from time import time, sleep
from subprocess import call
from argparse import ArgumentParser
from atexit import register
from pyadtn.aDTN import aDTN
from pyadtn.utils import info, debug
EXPERIMENT_DURATION = 5 * 60 + 10 # 5 minutes and 5 seconds (in seconds)
IFACE = "wlan0"
FREQ = str(2432) # 802.11 channel 1
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument('device_id', type=str, help='the hostname of this device')
parser.add_argument('sending_freq', type=int, help='time between sending of a batch (in seconds)')
parser.add_argument('batch_size', type=int, help='batch size (in number of packets)')
args = parser.parse_args()
device_id = args.device_id
sf = args.sending_freq
bs = args.batch_size
call(("./network-setup.sh", IFACE))
call(["iw", IFACE, "ibss", "join", "test", FREQ])
# Inform about current config.
experiment_id = "throughput_" + "_".join(
[str(i) for i in ["bs", bs, "sf", sf, "cr"]])
info("\nNow running: {}".format(experiment_id))
# Start aDTN
adtn = aDTN(bs, sf, IFACE, experiment_id)
adtn.start()
sleep(EXPERIMENT_DURATION)
adtn.stop()
Fix channel number in the commentfrom time import time, sleep
from subprocess import call
from argparse import ArgumentParser
from atexit import register
from pyadtn.aDTN import aDTN
from pyadtn.utils import info, debug
EXPERIMENT_DURATION = 5 * 60 + 10 # 5 minutes and 5 seconds (in seconds)
IFACE = "wlan0"
FREQ = str(2432) # 802.11 channel 5
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument('device_id', type=str, help='the hostname of this device')
parser.add_argument('sending_freq', type=int, help='time between sending of a batch (in seconds)')
parser.add_argument('batch_size', type=int, help='batch size (in number of packets)')
args = parser.parse_args()
device_id = args.device_id
sf = args.sending_freq
bs = args.batch_size
call(("./network-setup.sh", IFACE))
call(["iw", IFACE, "ibss", "join", "test", FREQ])
# Inform about current config.
experiment_id = "throughput_" + "_".join(
[str(i) for i in ["bs", bs, "sf", sf, "cr"]])
info("\nNow running: {}".format(experiment_id))
# Start aDTN
adtn = aDTN(bs, sf, IFACE, experiment_id)
adtn.start()
sleep(EXPERIMENT_DURATION)
adtn.stop()
| <commit_before>from time import time, sleep
from subprocess import call
from argparse import ArgumentParser
from atexit import register
from pyadtn.aDTN import aDTN
from pyadtn.utils import info, debug
EXPERIMENT_DURATION = 5 * 60 + 10 # 5 minutes and 5 seconds (in seconds)
IFACE = "wlan0"
FREQ = str(2432) # 802.11 channel 1
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument('device_id', type=str, help='the hostname of this device')
parser.add_argument('sending_freq', type=int, help='time between sending of a batch (in seconds)')
parser.add_argument('batch_size', type=int, help='batch size (in number of packets)')
args = parser.parse_args()
device_id = args.device_id
sf = args.sending_freq
bs = args.batch_size
call(("./network-setup.sh", IFACE))
call(["iw", IFACE, "ibss", "join", "test", FREQ])
# Inform about current config.
experiment_id = "throughput_" + "_".join(
[str(i) for i in ["bs", bs, "sf", sf, "cr"]])
info("\nNow running: {}".format(experiment_id))
# Start aDTN
adtn = aDTN(bs, sf, IFACE, experiment_id)
adtn.start()
sleep(EXPERIMENT_DURATION)
adtn.stop()
<commit_msg>Fix channel number in the comment<commit_after>from time import time, sleep
from subprocess import call
from argparse import ArgumentParser
from atexit import register
from pyadtn.aDTN import aDTN
from pyadtn.utils import info, debug
EXPERIMENT_DURATION = 5 * 60 + 10 # 5 minutes and 5 seconds (in seconds)
IFACE = "wlan0"
FREQ = str(2432) # 802.11 channel 5
if __name__ == "__main__":
parser = ArgumentParser()
parser.add_argument('device_id', type=str, help='the hostname of this device')
parser.add_argument('sending_freq', type=int, help='time between sending of a batch (in seconds)')
parser.add_argument('batch_size', type=int, help='batch size (in number of packets)')
args = parser.parse_args()
device_id = args.device_id
sf = args.sending_freq
bs = args.batch_size
call(("./network-setup.sh", IFACE))
call(["iw", IFACE, "ibss", "join", "test", FREQ])
# Inform about current config.
experiment_id = "throughput_" + "_".join(
[str(i) for i in ["bs", bs, "sf", sf, "cr"]])
info("\nNow running: {}".format(experiment_id))
# Start aDTN
adtn = aDTN(bs, sf, IFACE, experiment_id)
adtn.start()
sleep(EXPERIMENT_DURATION)
adtn.stop()
|
bf042cbe47c9fcfc0e608ff726a73d0e562027d0 | tests/test_with_hypothesis.py | tests/test_with_hypothesis.py | import pytest
from aead import AEAD
hypothesis = pytest.importorskip("hypothesis")
@hypothesis.given(bytes, bytes)
def test_round_trip_encrypt_decrypt(plaintext, associated_data):
cryptor = AEAD(AEAD.generate_key())
ct = cryptor.encrypt(plaintext, associated_data)
assert plaintext == cryptor.decrypt(ct, associated_data)
| import pytest
from aead import AEAD
hypothesis = pytest.importorskip("hypothesis")
@hypothesis.given(
hypothesis.strategies.binary(),
hypothesis.strategies.binary()
)
def test_round_trip_encrypt_decrypt(plaintext, associated_data):
cryptor = AEAD(AEAD.generate_key())
ct = cryptor.encrypt(plaintext, associated_data)
assert plaintext == cryptor.decrypt(ct, associated_data)
| Fix the Hypothesis test to work with new API. | Fix the Hypothesis test to work with new API.
The Hypothesis API has since moved on from the last time we pushed
a change. Fix the test suite to work with the new API.
| Python | apache-2.0 | Ayrx/python-aead,Ayrx/python-aead | import pytest
from aead import AEAD
hypothesis = pytest.importorskip("hypothesis")
@hypothesis.given(bytes, bytes)
def test_round_trip_encrypt_decrypt(plaintext, associated_data):
cryptor = AEAD(AEAD.generate_key())
ct = cryptor.encrypt(plaintext, associated_data)
assert plaintext == cryptor.decrypt(ct, associated_data)
Fix the Hypothesis test to work with new API.
The Hypothesis API has since moved on from the last time we pushed
a change. Fix the test suite to work with the new API. | import pytest
from aead import AEAD
hypothesis = pytest.importorskip("hypothesis")
@hypothesis.given(
hypothesis.strategies.binary(),
hypothesis.strategies.binary()
)
def test_round_trip_encrypt_decrypt(plaintext, associated_data):
cryptor = AEAD(AEAD.generate_key())
ct = cryptor.encrypt(plaintext, associated_data)
assert plaintext == cryptor.decrypt(ct, associated_data)
| <commit_before>import pytest
from aead import AEAD
hypothesis = pytest.importorskip("hypothesis")
@hypothesis.given(bytes, bytes)
def test_round_trip_encrypt_decrypt(plaintext, associated_data):
cryptor = AEAD(AEAD.generate_key())
ct = cryptor.encrypt(plaintext, associated_data)
assert plaintext == cryptor.decrypt(ct, associated_data)
<commit_msg>Fix the Hypothesis test to work with new API.
The Hypothesis API has since moved on from the last time we pushed
a change. Fix the test suite to work with the new API.<commit_after> | import pytest
from aead import AEAD
hypothesis = pytest.importorskip("hypothesis")
@hypothesis.given(
hypothesis.strategies.binary(),
hypothesis.strategies.binary()
)
def test_round_trip_encrypt_decrypt(plaintext, associated_data):
cryptor = AEAD(AEAD.generate_key())
ct = cryptor.encrypt(plaintext, associated_data)
assert plaintext == cryptor.decrypt(ct, associated_data)
| import pytest
from aead import AEAD
hypothesis = pytest.importorskip("hypothesis")
@hypothesis.given(bytes, bytes)
def test_round_trip_encrypt_decrypt(plaintext, associated_data):
cryptor = AEAD(AEAD.generate_key())
ct = cryptor.encrypt(plaintext, associated_data)
assert plaintext == cryptor.decrypt(ct, associated_data)
Fix the Hypothesis test to work with new API.
The Hypothesis API has since moved on from the last time we pushed
a change. Fix the test suite to work with the new API.import pytest
from aead import AEAD
hypothesis = pytest.importorskip("hypothesis")
@hypothesis.given(
hypothesis.strategies.binary(),
hypothesis.strategies.binary()
)
def test_round_trip_encrypt_decrypt(plaintext, associated_data):
cryptor = AEAD(AEAD.generate_key())
ct = cryptor.encrypt(plaintext, associated_data)
assert plaintext == cryptor.decrypt(ct, associated_data)
| <commit_before>import pytest
from aead import AEAD
hypothesis = pytest.importorskip("hypothesis")
@hypothesis.given(bytes, bytes)
def test_round_trip_encrypt_decrypt(plaintext, associated_data):
cryptor = AEAD(AEAD.generate_key())
ct = cryptor.encrypt(plaintext, associated_data)
assert plaintext == cryptor.decrypt(ct, associated_data)
<commit_msg>Fix the Hypothesis test to work with new API.
The Hypothesis API has since moved on from the last time we pushed
a change. Fix the test suite to work with the new API.<commit_after>import pytest
from aead import AEAD
hypothesis = pytest.importorskip("hypothesis")
@hypothesis.given(
hypothesis.strategies.binary(),
hypothesis.strategies.binary()
)
def test_round_trip_encrypt_decrypt(plaintext, associated_data):
cryptor = AEAD(AEAD.generate_key())
ct = cryptor.encrypt(plaintext, associated_data)
assert plaintext == cryptor.decrypt(ct, associated_data)
|
5520ebc1c232a69994d0941b7563f567d8defd0b | telemetry/telemetry/core/cast_interface.py | telemetry/telemetry/core/cast_interface.py | # Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for common operations on a device with Cast capabilities."""
import os
from telemetry.core import util
CAST_BROWSERS = [
'platform_app'
]
SSH_PWD = "user"
SSH_USER = "user"
DEFAULT_CAST_CORE_DIR = os.path.join(util.GetCatapultDir(), '..', 'cast_core',
'prebuilts')
DEFAULT_CWR_EXE = os.path.join(util.GetCatapultDir(), '..', 'cast_web_runtime')
| # Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for common operations on a device with Cast capabilities."""
import os
from telemetry.core import util
CAST_BROWSERS = [
'platform_app'
]
DEFAULT_CAST_CORE_DIR = os.path.join(util.GetCatapultDir(), '..', 'cast_core',
'prebuilts')
DEFAULT_CWR_EXE = os.path.join(util.GetCatapultDir(), '..', 'cast_web_runtime')
SSH_PWD = "root"
SSH_USER = "root"
| Change user and password used for Cast hardware devices | [cast3p] Change user and password used for Cast hardware devices
Change-Id: Id636d40afea79d08ce9af952438d5396add505e3
Reviewed-on: https://chromium-review.googlesource.com/c/catapult/+/3602946
Commit-Queue: Chong Gu <2b40eb872a3eb33d1a32a10811f471f8a41ba08b@google.com>
Auto-Submit: Chong Gu <2b40eb872a3eb33d1a32a10811f471f8a41ba08b@google.com>
Reviewed-by: Brian Sheedy <8cdc5cc1fa60ba15acb0d296e6f5592b7bb7d71c@chromium.org>
Commit-Queue: Brian Sheedy <8cdc5cc1fa60ba15acb0d296e6f5592b7bb7d71c@chromium.org>
| Python | bsd-3-clause | catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult | # Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for common operations on a device with Cast capabilities."""
import os
from telemetry.core import util
CAST_BROWSERS = [
'platform_app'
]
SSH_PWD = "user"
SSH_USER = "user"
DEFAULT_CAST_CORE_DIR = os.path.join(util.GetCatapultDir(), '..', 'cast_core',
'prebuilts')
DEFAULT_CWR_EXE = os.path.join(util.GetCatapultDir(), '..', 'cast_web_runtime')
[cast3p] Change user and password used for Cast hardware devices
Change-Id: Id636d40afea79d08ce9af952438d5396add505e3
Reviewed-on: https://chromium-review.googlesource.com/c/catapult/+/3602946
Commit-Queue: Chong Gu <2b40eb872a3eb33d1a32a10811f471f8a41ba08b@google.com>
Auto-Submit: Chong Gu <2b40eb872a3eb33d1a32a10811f471f8a41ba08b@google.com>
Reviewed-by: Brian Sheedy <8cdc5cc1fa60ba15acb0d296e6f5592b7bb7d71c@chromium.org>
Commit-Queue: Brian Sheedy <8cdc5cc1fa60ba15acb0d296e6f5592b7bb7d71c@chromium.org> | # Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for common operations on a device with Cast capabilities."""
import os
from telemetry.core import util
CAST_BROWSERS = [
'platform_app'
]
DEFAULT_CAST_CORE_DIR = os.path.join(util.GetCatapultDir(), '..', 'cast_core',
'prebuilts')
DEFAULT_CWR_EXE = os.path.join(util.GetCatapultDir(), '..', 'cast_web_runtime')
SSH_PWD = "root"
SSH_USER = "root"
| <commit_before># Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for common operations on a device with Cast capabilities."""
import os
from telemetry.core import util
CAST_BROWSERS = [
'platform_app'
]
SSH_PWD = "user"
SSH_USER = "user"
DEFAULT_CAST_CORE_DIR = os.path.join(util.GetCatapultDir(), '..', 'cast_core',
'prebuilts')
DEFAULT_CWR_EXE = os.path.join(util.GetCatapultDir(), '..', 'cast_web_runtime')
<commit_msg>[cast3p] Change user and password used for Cast hardware devices
Change-Id: Id636d40afea79d08ce9af952438d5396add505e3
Reviewed-on: https://chromium-review.googlesource.com/c/catapult/+/3602946
Commit-Queue: Chong Gu <2b40eb872a3eb33d1a32a10811f471f8a41ba08b@google.com>
Auto-Submit: Chong Gu <2b40eb872a3eb33d1a32a10811f471f8a41ba08b@google.com>
Reviewed-by: Brian Sheedy <8cdc5cc1fa60ba15acb0d296e6f5592b7bb7d71c@chromium.org>
Commit-Queue: Brian Sheedy <8cdc5cc1fa60ba15acb0d296e6f5592b7bb7d71c@chromium.org><commit_after> | # Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for common operations on a device with Cast capabilities."""
import os
from telemetry.core import util
CAST_BROWSERS = [
'platform_app'
]
DEFAULT_CAST_CORE_DIR = os.path.join(util.GetCatapultDir(), '..', 'cast_core',
'prebuilts')
DEFAULT_CWR_EXE = os.path.join(util.GetCatapultDir(), '..', 'cast_web_runtime')
SSH_PWD = "root"
SSH_USER = "root"
| # Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for common operations on a device with Cast capabilities."""
import os
from telemetry.core import util
CAST_BROWSERS = [
'platform_app'
]
SSH_PWD = "user"
SSH_USER = "user"
DEFAULT_CAST_CORE_DIR = os.path.join(util.GetCatapultDir(), '..', 'cast_core',
'prebuilts')
DEFAULT_CWR_EXE = os.path.join(util.GetCatapultDir(), '..', 'cast_web_runtime')
[cast3p] Change user and password used for Cast hardware devices
Change-Id: Id636d40afea79d08ce9af952438d5396add505e3
Reviewed-on: https://chromium-review.googlesource.com/c/catapult/+/3602946
Commit-Queue: Chong Gu <2b40eb872a3eb33d1a32a10811f471f8a41ba08b@google.com>
Auto-Submit: Chong Gu <2b40eb872a3eb33d1a32a10811f471f8a41ba08b@google.com>
Reviewed-by: Brian Sheedy <8cdc5cc1fa60ba15acb0d296e6f5592b7bb7d71c@chromium.org>
Commit-Queue: Brian Sheedy <8cdc5cc1fa60ba15acb0d296e6f5592b7bb7d71c@chromium.org># Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for common operations on a device with Cast capabilities."""
import os
from telemetry.core import util
CAST_BROWSERS = [
'platform_app'
]
DEFAULT_CAST_CORE_DIR = os.path.join(util.GetCatapultDir(), '..', 'cast_core',
'prebuilts')
DEFAULT_CWR_EXE = os.path.join(util.GetCatapultDir(), '..', 'cast_web_runtime')
SSH_PWD = "root"
SSH_USER = "root"
| <commit_before># Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for common operations on a device with Cast capabilities."""
import os
from telemetry.core import util
CAST_BROWSERS = [
'platform_app'
]
SSH_PWD = "user"
SSH_USER = "user"
DEFAULT_CAST_CORE_DIR = os.path.join(util.GetCatapultDir(), '..', 'cast_core',
'prebuilts')
DEFAULT_CWR_EXE = os.path.join(util.GetCatapultDir(), '..', 'cast_web_runtime')
<commit_msg>[cast3p] Change user and password used for Cast hardware devices
Change-Id: Id636d40afea79d08ce9af952438d5396add505e3
Reviewed-on: https://chromium-review.googlesource.com/c/catapult/+/3602946
Commit-Queue: Chong Gu <2b40eb872a3eb33d1a32a10811f471f8a41ba08b@google.com>
Auto-Submit: Chong Gu <2b40eb872a3eb33d1a32a10811f471f8a41ba08b@google.com>
Reviewed-by: Brian Sheedy <8cdc5cc1fa60ba15acb0d296e6f5592b7bb7d71c@chromium.org>
Commit-Queue: Brian Sheedy <8cdc5cc1fa60ba15acb0d296e6f5592b7bb7d71c@chromium.org><commit_after># Copyright 2022 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A wrapper for common operations on a device with Cast capabilities."""
import os
from telemetry.core import util
CAST_BROWSERS = [
'platform_app'
]
DEFAULT_CAST_CORE_DIR = os.path.join(util.GetCatapultDir(), '..', 'cast_core',
'prebuilts')
DEFAULT_CWR_EXE = os.path.join(util.GetCatapultDir(), '..', 'cast_web_runtime')
SSH_PWD = "root"
SSH_USER = "root"
|
2b46bee644222c2e1c29d20ffc23768ed11006d6 | VMEncryption/main/oscrypto/encryptstates/SelinuxState.py | VMEncryption/main/oscrypto/encryptstates/SelinuxState.py | #!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
class SelinuxState(OSEncryptionState):
def __init__(self, context):
super(SelinuxState, self).__init__('SelinuxState', context)
def enter(self):
if not super(SelinuxState, self).should_enter():
return
self.context.logger.log("Entering selinux state")
def should_exit(self):
self.context.logger.log("Verifying if machine should exit selinux state")
return super(SelinuxState, self).should_exit()
| #!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
class SelinuxState(OSEncryptionState):
def __init__(self, context):
super(SelinuxState, self).__init__('SelinuxState', context)
def enter(self):
if not super(SelinuxState, self).should_enter():
return
self.context.logger.log("Entering selinux state")
se_linux_status = self.context.encryption_environment.get_se_linux()
if(se_linux_status.lower() == 'enforcing'):
self.context.logger.log("SELinux is in enforcing mode, disabling")
self.context.encryption_environment.disable_se_linux()
def should_exit(self):
self.context.logger.log("Verifying if machine should exit selinux state")
return super(SelinuxState, self).should_exit()
| Disable SELinux before OS disk encryption | Disable SELinux before OS disk encryption
| Python | apache-2.0 | vityagi/azure-linux-extensions,vityagi/azure-linux-extensions,andyliuliming/azure-linux-extensions,andyliuliming/azure-linux-extensions,soumyanishan/azure-linux-extensions,bpramod/azure-linux-extensions,krkhan/azure-linux-extensions,vityagi/azure-linux-extensions,jasonzio/azure-linux-extensions,andyliuliming/azure-linux-extensions,Azure/azure-linux-extensions,vityagi/azure-linux-extensions,krkhan/azure-linux-extensions,bpramod/azure-linux-extensions,varunkumta/azure-linux-extensions,bpramod/azure-linux-extensions,Azure/azure-linux-extensions,jasonzio/azure-linux-extensions,bpramod/azure-linux-extensions,Azure/azure-linux-extensions,andyliuliming/azure-linux-extensions,Azure/azure-linux-extensions,varunkumta/azure-linux-extensions,krkhan/azure-linux-extensions,jasonzio/azure-linux-extensions,vityagi/azure-linux-extensions,Azure/azure-linux-extensions,bpramod/azure-linux-extensions,Azure/azure-linux-extensions,vityagi/azure-linux-extensions,soumyanishan/azure-linux-extensions,krkhan/azure-linux-extensions,jasonzio/azure-linux-extensions,varunkumta/azure-linux-extensions,vityagi/azure-linux-extensions,bpramod/azure-linux-extensions,varunkumta/azure-linux-extensions,Azure/azure-linux-extensions,soumyanishan/azure-linux-extensions,soumyanishan/azure-linux-extensions,bpramod/azure-linux-extensions,soumyanishan/azure-linux-extensions | #!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
class SelinuxState(OSEncryptionState):
def __init__(self, context):
super(SelinuxState, self).__init__('SelinuxState', context)
def enter(self):
if not super(SelinuxState, self).should_enter():
return
self.context.logger.log("Entering selinux state")
def should_exit(self):
self.context.logger.log("Verifying if machine should exit selinux state")
return super(SelinuxState, self).should_exit()
Disable SELinux before OS disk encryption | #!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
class SelinuxState(OSEncryptionState):
def __init__(self, context):
super(SelinuxState, self).__init__('SelinuxState', context)
def enter(self):
if not super(SelinuxState, self).should_enter():
return
self.context.logger.log("Entering selinux state")
se_linux_status = self.context.encryption_environment.get_se_linux()
if(se_linux_status.lower() == 'enforcing'):
self.context.logger.log("SELinux is in enforcing mode, disabling")
self.context.encryption_environment.disable_se_linux()
def should_exit(self):
self.context.logger.log("Verifying if machine should exit selinux state")
return super(SelinuxState, self).should_exit()
| <commit_before>#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
class SelinuxState(OSEncryptionState):
def __init__(self, context):
super(SelinuxState, self).__init__('SelinuxState', context)
def enter(self):
if not super(SelinuxState, self).should_enter():
return
self.context.logger.log("Entering selinux state")
def should_exit(self):
self.context.logger.log("Verifying if machine should exit selinux state")
return super(SelinuxState, self).should_exit()
<commit_msg>Disable SELinux before OS disk encryption<commit_after> | #!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
class SelinuxState(OSEncryptionState):
def __init__(self, context):
super(SelinuxState, self).__init__('SelinuxState', context)
def enter(self):
if not super(SelinuxState, self).should_enter():
return
self.context.logger.log("Entering selinux state")
se_linux_status = self.context.encryption_environment.get_se_linux()
if(se_linux_status.lower() == 'enforcing'):
self.context.logger.log("SELinux is in enforcing mode, disabling")
self.context.encryption_environment.disable_se_linux()
def should_exit(self):
self.context.logger.log("Verifying if machine should exit selinux state")
return super(SelinuxState, self).should_exit()
| #!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
class SelinuxState(OSEncryptionState):
def __init__(self, context):
super(SelinuxState, self).__init__('SelinuxState', context)
def enter(self):
if not super(SelinuxState, self).should_enter():
return
self.context.logger.log("Entering selinux state")
def should_exit(self):
self.context.logger.log("Verifying if machine should exit selinux state")
return super(SelinuxState, self).should_exit()
Disable SELinux before OS disk encryption#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
class SelinuxState(OSEncryptionState):
def __init__(self, context):
super(SelinuxState, self).__init__('SelinuxState', context)
def enter(self):
if not super(SelinuxState, self).should_enter():
return
self.context.logger.log("Entering selinux state")
se_linux_status = self.context.encryption_environment.get_se_linux()
if(se_linux_status.lower() == 'enforcing'):
self.context.logger.log("SELinux is in enforcing mode, disabling")
self.context.encryption_environment.disable_se_linux()
def should_exit(self):
self.context.logger.log("Verifying if machine should exit selinux state")
return super(SelinuxState, self).should_exit()
| <commit_before>#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
class SelinuxState(OSEncryptionState):
def __init__(self, context):
super(SelinuxState, self).__init__('SelinuxState', context)
def enter(self):
if not super(SelinuxState, self).should_enter():
return
self.context.logger.log("Entering selinux state")
def should_exit(self):
self.context.logger.log("Verifying if machine should exit selinux state")
return super(SelinuxState, self).should_exit()
<commit_msg>Disable SELinux before OS disk encryption<commit_after>#!/usr/bin/env python
#
# VM Backup extension
#
# Copyright 2015 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Requires Python 2.7+
#
from OSEncryptionState import *
class SelinuxState(OSEncryptionState):
def __init__(self, context):
super(SelinuxState, self).__init__('SelinuxState', context)
def enter(self):
if not super(SelinuxState, self).should_enter():
return
self.context.logger.log("Entering selinux state")
se_linux_status = self.context.encryption_environment.get_se_linux()
if(se_linux_status.lower() == 'enforcing'):
self.context.logger.log("SELinux is in enforcing mode, disabling")
self.context.encryption_environment.disable_se_linux()
def should_exit(self):
self.context.logger.log("Verifying if machine should exit selinux state")
return super(SelinuxState, self).should_exit()
|
0128a0cc3c266848181ed2f6af3db34cc9c99b5d | terroroftinytown/services/googl.py | terroroftinytown/services/googl.py |
from terroroftinytown.services.base import BaseService
from terroroftinytown.services.status import URLStatus
import re
class GooglService(BaseService):
def process_response(self, response):
status_code = response.status_code
if status_code in self.params['redirect_codes']:
if self.ratelimited(response):
return self.process_banned(response)
return self.process_redirect(response)
elif status_code in self.params['no_redirect_codes']:
return self.process_no_redirect(response)
elif status_code in self.params['unavailable_codes']:
return self.process_unavailable(response)
elif status_code in self.params['banned_codes']:
return self.process_banned(response)
else:
return self.process_unknown_code(response)
def ratelimited(self, response):
if 'Location' not in response.headers:
return False
result_url = response.headers['Location']
response.content # read the response to allow connection reuse
return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
| from terroroftinytown.services.base import BaseService
from terroroftinytown.services.status import URLStatus
import re
class GooglService(BaseService):
def process_response(self, response):
status_code = response.status_code
if status_code in self.params['redirect_codes']:
if self.ratelimited(response):
return self.process_banned(response)
return self.process_redirect(response)
elif status_code in self.params['no_redirect_codes']:
return self.process_no_redirect(response)
elif status_code in self.params['unavailable_codes']:
return self.process_unavailable(response)
elif status_code in self.params['banned_codes']:
return self.process_banned(response)
else:
return self.process_unknown_code(response)
def ratelimited(self, response):
if 'Location' not in response.headers:
return False
result_url = response.headers['Location']
response.content # read the response to allow connection reuse
return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
| Use spaces instead of tabs | Use spaces instead of tabs
| Python | mit | ArchiveTeam/terroroftinytown,ArchiveTeam/terroroftinytown,ArchiveTeam/terroroftinytown |
from terroroftinytown.services.base import BaseService
from terroroftinytown.services.status import URLStatus
import re
class GooglService(BaseService):
def process_response(self, response):
status_code = response.status_code
if status_code in self.params['redirect_codes']:
if self.ratelimited(response):
return self.process_banned(response)
return self.process_redirect(response)
elif status_code in self.params['no_redirect_codes']:
return self.process_no_redirect(response)
elif status_code in self.params['unavailable_codes']:
return self.process_unavailable(response)
elif status_code in self.params['banned_codes']:
return self.process_banned(response)
else:
return self.process_unknown_code(response)
def ratelimited(self, response):
if 'Location' not in response.headers:
return False
result_url = response.headers['Location']
response.content # read the response to allow connection reuse
return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
Use spaces instead of tabs | from terroroftinytown.services.base import BaseService
from terroroftinytown.services.status import URLStatus
import re
class GooglService(BaseService):
def process_response(self, response):
status_code = response.status_code
if status_code in self.params['redirect_codes']:
if self.ratelimited(response):
return self.process_banned(response)
return self.process_redirect(response)
elif status_code in self.params['no_redirect_codes']:
return self.process_no_redirect(response)
elif status_code in self.params['unavailable_codes']:
return self.process_unavailable(response)
elif status_code in self.params['banned_codes']:
return self.process_banned(response)
else:
return self.process_unknown_code(response)
def ratelimited(self, response):
if 'Location' not in response.headers:
return False
result_url = response.headers['Location']
response.content # read the response to allow connection reuse
return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
| <commit_before>
from terroroftinytown.services.base import BaseService
from terroroftinytown.services.status import URLStatus
import re
class GooglService(BaseService):
def process_response(self, response):
status_code = response.status_code
if status_code in self.params['redirect_codes']:
if self.ratelimited(response):
return self.process_banned(response)
return self.process_redirect(response)
elif status_code in self.params['no_redirect_codes']:
return self.process_no_redirect(response)
elif status_code in self.params['unavailable_codes']:
return self.process_unavailable(response)
elif status_code in self.params['banned_codes']:
return self.process_banned(response)
else:
return self.process_unknown_code(response)
def ratelimited(self, response):
if 'Location' not in response.headers:
return False
result_url = response.headers['Location']
response.content # read the response to allow connection reuse
return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
<commit_msg>Use spaces instead of tabs<commit_after> | from terroroftinytown.services.base import BaseService
from terroroftinytown.services.status import URLStatus
import re
class GooglService(BaseService):
def process_response(self, response):
status_code = response.status_code
if status_code in self.params['redirect_codes']:
if self.ratelimited(response):
return self.process_banned(response)
return self.process_redirect(response)
elif status_code in self.params['no_redirect_codes']:
return self.process_no_redirect(response)
elif status_code in self.params['unavailable_codes']:
return self.process_unavailable(response)
elif status_code in self.params['banned_codes']:
return self.process_banned(response)
else:
return self.process_unknown_code(response)
def ratelimited(self, response):
if 'Location' not in response.headers:
return False
result_url = response.headers['Location']
response.content # read the response to allow connection reuse
return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
|
from terroroftinytown.services.base import BaseService
from terroroftinytown.services.status import URLStatus
import re
class GooglService(BaseService):
def process_response(self, response):
status_code = response.status_code
if status_code in self.params['redirect_codes']:
if self.ratelimited(response):
return self.process_banned(response)
return self.process_redirect(response)
elif status_code in self.params['no_redirect_codes']:
return self.process_no_redirect(response)
elif status_code in self.params['unavailable_codes']:
return self.process_unavailable(response)
elif status_code in self.params['banned_codes']:
return self.process_banned(response)
else:
return self.process_unknown_code(response)
def ratelimited(self, response):
if 'Location' not in response.headers:
return False
result_url = response.headers['Location']
response.content # read the response to allow connection reuse
return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
Use spaces instead of tabsfrom terroroftinytown.services.base import BaseService
from terroroftinytown.services.status import URLStatus
import re
class GooglService(BaseService):
def process_response(self, response):
status_code = response.status_code
if status_code in self.params['redirect_codes']:
if self.ratelimited(response):
return self.process_banned(response)
return self.process_redirect(response)
elif status_code in self.params['no_redirect_codes']:
return self.process_no_redirect(response)
elif status_code in self.params['unavailable_codes']:
return self.process_unavailable(response)
elif status_code in self.params['banned_codes']:
return self.process_banned(response)
else:
return self.process_unknown_code(response)
def ratelimited(self, response):
if 'Location' not in response.headers:
return False
result_url = response.headers['Location']
response.content # read the response to allow connection reuse
return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
| <commit_before>
from terroroftinytown.services.base import BaseService
from terroroftinytown.services.status import URLStatus
import re
class GooglService(BaseService):
def process_response(self, response):
status_code = response.status_code
if status_code in self.params['redirect_codes']:
if self.ratelimited(response):
return self.process_banned(response)
return self.process_redirect(response)
elif status_code in self.params['no_redirect_codes']:
return self.process_no_redirect(response)
elif status_code in self.params['unavailable_codes']:
return self.process_unavailable(response)
elif status_code in self.params['banned_codes']:
return self.process_banned(response)
else:
return self.process_unknown_code(response)
def ratelimited(self, response):
if 'Location' not in response.headers:
return False
result_url = response.headers['Location']
response.content # read the response to allow connection reuse
return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
<commit_msg>Use spaces instead of tabs<commit_after>from terroroftinytown.services.base import BaseService
from terroroftinytown.services.status import URLStatus
import re
class GooglService(BaseService):
def process_response(self, response):
status_code = response.status_code
if status_code in self.params['redirect_codes']:
if self.ratelimited(response):
return self.process_banned(response)
return self.process_redirect(response)
elif status_code in self.params['no_redirect_codes']:
return self.process_no_redirect(response)
elif status_code in self.params['unavailable_codes']:
return self.process_unavailable(response)
elif status_code in self.params['banned_codes']:
return self.process_banned(response)
else:
return self.process_unknown_code(response)
def ratelimited(self, response):
if 'Location' not in response.headers:
return False
result_url = response.headers['Location']
response.content # read the response to allow connection reuse
return not not re.search('^https?://(?:www\.)?google\.com/sorry', result_url)
|
ba4b348e03f5f875bb170a8b7d5c560ba7c6968f | features/groups/migrations/0002_auto_20160922_1108.py | features/groups/migrations/0002_auto_20160922_1108.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 09:08
from __future__ import unicode_literals
from django.db import migrations
def copy_groups(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
for g in Group1.objects.all():
g2 = Group2.objects.create(
name=g.name,
slug=g.slug,
address=g.address,
avatar=g.avatar,
avatar_color=g.avatar_color,
date_founded=g.date_founded,
description=g.description,
logo=g.logo,
url=g.url,
closed=g.closed)
g2.date_created = g.date_created
g2.slug = g.slug
g2.save()
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.RunPython(copy_groups)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 09:08
from __future__ import unicode_literals
from django.db import migrations
def copy_groups(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
for g in Group1.objects.order_by('id'):
g2 = Group2.objects.create(
name=g.name,
slug=g.slug,
address=g.address,
avatar=g.avatar,
avatar_color=g.avatar_color,
date_founded=g.date_founded,
description=g.description,
logo=g.logo,
url=g.url,
closed=g.closed)
g2.date_created = g.date_created
g2.slug = g.slug
g2.save()
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.RunPython(copy_groups)
]
| Order groups by id when copying | Order groups by id when copying
| Python | agpl-3.0 | stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 09:08
from __future__ import unicode_literals
from django.db import migrations
def copy_groups(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
for g in Group1.objects.all():
g2 = Group2.objects.create(
name=g.name,
slug=g.slug,
address=g.address,
avatar=g.avatar,
avatar_color=g.avatar_color,
date_founded=g.date_founded,
description=g.description,
logo=g.logo,
url=g.url,
closed=g.closed)
g2.date_created = g.date_created
g2.slug = g.slug
g2.save()
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.RunPython(copy_groups)
]
Order groups by id when copying | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 09:08
from __future__ import unicode_literals
from django.db import migrations
def copy_groups(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
for g in Group1.objects.order_by('id'):
g2 = Group2.objects.create(
name=g.name,
slug=g.slug,
address=g.address,
avatar=g.avatar,
avatar_color=g.avatar_color,
date_founded=g.date_founded,
description=g.description,
logo=g.logo,
url=g.url,
closed=g.closed)
g2.date_created = g.date_created
g2.slug = g.slug
g2.save()
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.RunPython(copy_groups)
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 09:08
from __future__ import unicode_literals
from django.db import migrations
def copy_groups(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
for g in Group1.objects.all():
g2 = Group2.objects.create(
name=g.name,
slug=g.slug,
address=g.address,
avatar=g.avatar,
avatar_color=g.avatar_color,
date_founded=g.date_founded,
description=g.description,
logo=g.logo,
url=g.url,
closed=g.closed)
g2.date_created = g.date_created
g2.slug = g.slug
g2.save()
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.RunPython(copy_groups)
]
<commit_msg>Order groups by id when copying<commit_after> | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 09:08
from __future__ import unicode_literals
from django.db import migrations
def copy_groups(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
for g in Group1.objects.order_by('id'):
g2 = Group2.objects.create(
name=g.name,
slug=g.slug,
address=g.address,
avatar=g.avatar,
avatar_color=g.avatar_color,
date_founded=g.date_founded,
description=g.description,
logo=g.logo,
url=g.url,
closed=g.closed)
g2.date_created = g.date_created
g2.slug = g.slug
g2.save()
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.RunPython(copy_groups)
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 09:08
from __future__ import unicode_literals
from django.db import migrations
def copy_groups(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
for g in Group1.objects.all():
g2 = Group2.objects.create(
name=g.name,
slug=g.slug,
address=g.address,
avatar=g.avatar,
avatar_color=g.avatar_color,
date_founded=g.date_founded,
description=g.description,
logo=g.logo,
url=g.url,
closed=g.closed)
g2.date_created = g.date_created
g2.slug = g.slug
g2.save()
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.RunPython(copy_groups)
]
Order groups by id when copying# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 09:08
from __future__ import unicode_literals
from django.db import migrations
def copy_groups(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
for g in Group1.objects.order_by('id'):
g2 = Group2.objects.create(
name=g.name,
slug=g.slug,
address=g.address,
avatar=g.avatar,
avatar_color=g.avatar_color,
date_founded=g.date_founded,
description=g.description,
logo=g.logo,
url=g.url,
closed=g.closed)
g2.date_created = g.date_created
g2.slug = g.slug
g2.save()
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.RunPython(copy_groups)
]
| <commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 09:08
from __future__ import unicode_literals
from django.db import migrations
def copy_groups(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
for g in Group1.objects.all():
g2 = Group2.objects.create(
name=g.name,
slug=g.slug,
address=g.address,
avatar=g.avatar,
avatar_color=g.avatar_color,
date_founded=g.date_founded,
description=g.description,
logo=g.logo,
url=g.url,
closed=g.closed)
g2.date_created = g.date_created
g2.slug = g.slug
g2.save()
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.RunPython(copy_groups)
]
<commit_msg>Order groups by id when copying<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-09-22 09:08
from __future__ import unicode_literals
from django.db import migrations
def copy_groups(apps, schema_editor):
Group1 = apps.get_model('entities.Group')
Group2 = apps.get_model('groups.Group')
for g in Group1.objects.order_by('id'):
g2 = Group2.objects.create(
name=g.name,
slug=g.slug,
address=g.address,
avatar=g.avatar,
avatar_color=g.avatar_color,
date_founded=g.date_founded,
description=g.description,
logo=g.logo,
url=g.url,
closed=g.closed)
g2.date_created = g.date_created
g2.slug = g.slug
g2.save()
class Migration(migrations.Migration):
dependencies = [
('groups', '0001_initial'),
]
operations = [
migrations.RunPython(copy_groups)
]
|
25db9110d34760118b47b2bdf637cf6947154c2c | tests/unit/distributed/test_objectstore.py | tests/unit/distributed/test_objectstore.py | import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
| import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
def test_filename_with_json_key_is_present(mock_api):
assert GoogleDrive.GOOGLE_API_KEY_FILE
assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
| Test json file with api key is in API service class | Test json file with api key is in API service class
| Python | mit | a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,biocyberman/bcbio-nextgen,biocyberman/bcbio-nextgen,chapmanb/bcbio-nextgen,vladsaveliev/bcbio-nextgen,biocyberman/bcbio-nextgen,vladsaveliev/bcbio-nextgen,chapmanb/bcbio-nextgen,lbeltrame/bcbio-nextgen,a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,vladsaveliev/bcbio-nextgen,chapmanb/bcbio-nextgen,a113n/bcbio-nextgen | import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
Test json file with api key is in API service class | import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
def test_filename_with_json_key_is_present(mock_api):
assert GoogleDrive.GOOGLE_API_KEY_FILE
assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
| <commit_before>import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
<commit_msg>Test json file with api key is in API service class<commit_after> | import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
def test_filename_with_json_key_is_present(mock_api):
assert GoogleDrive.GOOGLE_API_KEY_FILE
assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
| import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
Test json file with api key is in API service classimport pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
def test_filename_with_json_key_is_present(mock_api):
assert GoogleDrive.GOOGLE_API_KEY_FILE
assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
| <commit_before>import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
<commit_msg>Test json file with api key is in API service class<commit_after>import pytest
from bcbio.distributed import objectstore
from bcbio.distributed.objectstore import GoogleDrive
@pytest.fixture
def mock_api(mocker):
mocker.patch('bcbio.distributed.objectstore.ServiceAccountCredentials')
mocker.patch('bcbio.distributed.objectstore.Http')
mocker.patch('bcbio.distributed.objectstore.build')
mocker.patch('bcbio.distributed.objectstore.http')
yield None
def test_create_google_drive_service(mock_api):
service = GoogleDrive()
assert service
def test_creates_http_auth(mock_api):
GoogleDrive()
objectstore.ServiceAccountCredentials.from_json_keyfile_name\
.assert_called_once_with(
GoogleDrive.GOOGLE_API_KEY_FILE, scopes=GoogleDrive.SCOPES)
def test_api_scope_includes_google_drive(mock_api):
drive_scope = 'https://www.googleapis.com/auth/drive'
assert drive_scope in GoogleDrive.SCOPES
def test_filename_with_json_key_is_present(mock_api):
assert GoogleDrive.GOOGLE_API_KEY_FILE
assert GoogleDrive.GOOGLE_API_KEY_FILE.endswith('.json')
|
68812938503901df48b9f3c7cd3b3160d51a52fa | txaws/client/_validators.py | txaws/client/_validators.py | # Licenced under the txaws licence available at /LICENSE in the txaws source.
"""
attrs validators for internal use.
"""
import attr
from attr import validators
def list_of(validator):
"""
Require a value which is a list containing elements which the
given validator accepts.
"""
return _ListOf(validator)
@attr.s(frozen=True)
class _ListOf(object):
"""
attrs validator for a list of elements which satisfy another
validator.
L{list_of} is the public constructor to hide the type and prevent
subclassing.
"""
validator = attr.ib()
def __call__(self, inst, a, value):
validators.instance_of(list)(inst, a, value)
for n, element in enumerate(value):
inner_identifier = u"{}[{}]".format(a.name, n)
# Create an Attribute with a name that refers to the
# validator we're using and the index we're validating.
# Otherwise the validation failure is pretty confusing.
inner_attr = attr.Attribute(
name=inner_identifier,
default=None,
validator=self.validator,
repr=False,
cmp=False,
hash=False,
init=False,
)
self.validator(inst, inner_attr, element)
| # Licenced under the txaws licence available at /LICENSE in the txaws source.
"""
attrs validators for internal use.
"""
import attr
from attr import validators
def list_of(validator):
"""
Require a value which is a list containing elements which the
given validator accepts.
"""
return _ContainerOf(list, validator)
def set_of(validator):
"""
Require a value which is a set containing elements which the given
validator accepts.
"""
return _ContainerOf(set, validator)
@attr.s(frozen=True)
class _ContainerOf(object):
"""
attrs validator for a container of objects which satisfy another
validator.
L{list_of}, L{set_of}, etc are the public constructors to hide the
type and prevent subclassing.
"""
container_type = attr.ib()
validator = attr.ib()
def __call__(self, inst, a, value):
validators.instance_of(self.container_type)(inst, a, value)
for n, element in enumerate(sorted(value)):
inner_identifier = u"sorted({})[{}]".format(a.name, n)
# Create an Attribute with a name that refers to the
# validator we're using and the index we're validating.
# Otherwise the validation failure is pretty confusing.
inner_attr = attr.Attribute(
name=inner_identifier,
default=None,
validator=self.validator,
repr=False,
cmp=False,
hash=False,
init=False,
)
self.validator(inst, inner_attr, element)
| Introduce set_of (similar to list_of validator) | Introduce set_of (similar to list_of validator)
| Python | mit | oubiwann/txaws,mithrandi/txaws,mithrandi/txaws,twisted/txaws,twisted/txaws,oubiwann/txaws | # Licenced under the txaws licence available at /LICENSE in the txaws source.
"""
attrs validators for internal use.
"""
import attr
from attr import validators
def list_of(validator):
"""
Require a value which is a list containing elements which the
given validator accepts.
"""
return _ListOf(validator)
@attr.s(frozen=True)
class _ListOf(object):
"""
attrs validator for a list of elements which satisfy another
validator.
L{list_of} is the public constructor to hide the type and prevent
subclassing.
"""
validator = attr.ib()
def __call__(self, inst, a, value):
validators.instance_of(list)(inst, a, value)
for n, element in enumerate(value):
inner_identifier = u"{}[{}]".format(a.name, n)
# Create an Attribute with a name that refers to the
# validator we're using and the index we're validating.
# Otherwise the validation failure is pretty confusing.
inner_attr = attr.Attribute(
name=inner_identifier,
default=None,
validator=self.validator,
repr=False,
cmp=False,
hash=False,
init=False,
)
self.validator(inst, inner_attr, element)
Introduce set_of (similar to list_of validator) | # Licenced under the txaws licence available at /LICENSE in the txaws source.
"""
attrs validators for internal use.
"""
import attr
from attr import validators
def list_of(validator):
"""
Require a value which is a list containing elements which the
given validator accepts.
"""
return _ContainerOf(list, validator)
def set_of(validator):
"""
Require a value which is a set containing elements which the given
validator accepts.
"""
return _ContainerOf(set, validator)
@attr.s(frozen=True)
class _ContainerOf(object):
"""
attrs validator for a container of objects which satisfy another
validator.
L{list_of}, L{set_of}, etc are the public constructors to hide the
type and prevent subclassing.
"""
container_type = attr.ib()
validator = attr.ib()
def __call__(self, inst, a, value):
validators.instance_of(self.container_type)(inst, a, value)
for n, element in enumerate(sorted(value)):
inner_identifier = u"sorted({})[{}]".format(a.name, n)
# Create an Attribute with a name that refers to the
# validator we're using and the index we're validating.
# Otherwise the validation failure is pretty confusing.
inner_attr = attr.Attribute(
name=inner_identifier,
default=None,
validator=self.validator,
repr=False,
cmp=False,
hash=False,
init=False,
)
self.validator(inst, inner_attr, element)
| <commit_before># Licenced under the txaws licence available at /LICENSE in the txaws source.
"""
attrs validators for internal use.
"""
import attr
from attr import validators
def list_of(validator):
"""
Require a value which is a list containing elements which the
given validator accepts.
"""
return _ListOf(validator)
@attr.s(frozen=True)
class _ListOf(object):
"""
attrs validator for a list of elements which satisfy another
validator.
L{list_of} is the public constructor to hide the type and prevent
subclassing.
"""
validator = attr.ib()
def __call__(self, inst, a, value):
validators.instance_of(list)(inst, a, value)
for n, element in enumerate(value):
inner_identifier = u"{}[{}]".format(a.name, n)
# Create an Attribute with a name that refers to the
# validator we're using and the index we're validating.
# Otherwise the validation failure is pretty confusing.
inner_attr = attr.Attribute(
name=inner_identifier,
default=None,
validator=self.validator,
repr=False,
cmp=False,
hash=False,
init=False,
)
self.validator(inst, inner_attr, element)
<commit_msg>Introduce set_of (similar to list_of validator)<commit_after> | # Licenced under the txaws licence available at /LICENSE in the txaws source.
"""
attrs validators for internal use.
"""
import attr
from attr import validators
def list_of(validator):
"""
Require a value which is a list containing elements which the
given validator accepts.
"""
return _ContainerOf(list, validator)
def set_of(validator):
"""
Require a value which is a set containing elements which the given
validator accepts.
"""
return _ContainerOf(set, validator)
@attr.s(frozen=True)
class _ContainerOf(object):
"""
attrs validator for a container of objects which satisfy another
validator.
L{list_of}, L{set_of}, etc are the public constructors to hide the
type and prevent subclassing.
"""
container_type = attr.ib()
validator = attr.ib()
def __call__(self, inst, a, value):
validators.instance_of(self.container_type)(inst, a, value)
for n, element in enumerate(sorted(value)):
inner_identifier = u"sorted({})[{}]".format(a.name, n)
# Create an Attribute with a name that refers to the
# validator we're using and the index we're validating.
# Otherwise the validation failure is pretty confusing.
inner_attr = attr.Attribute(
name=inner_identifier,
default=None,
validator=self.validator,
repr=False,
cmp=False,
hash=False,
init=False,
)
self.validator(inst, inner_attr, element)
| # Licenced under the txaws licence available at /LICENSE in the txaws source.
"""
attrs validators for internal use.
"""
import attr
from attr import validators
def list_of(validator):
"""
Require a value which is a list containing elements which the
given validator accepts.
"""
return _ListOf(validator)
@attr.s(frozen=True)
class _ListOf(object):
"""
attrs validator for a list of elements which satisfy another
validator.
L{list_of} is the public constructor to hide the type and prevent
subclassing.
"""
validator = attr.ib()
def __call__(self, inst, a, value):
validators.instance_of(list)(inst, a, value)
for n, element in enumerate(value):
inner_identifier = u"{}[{}]".format(a.name, n)
# Create an Attribute with a name that refers to the
# validator we're using and the index we're validating.
# Otherwise the validation failure is pretty confusing.
inner_attr = attr.Attribute(
name=inner_identifier,
default=None,
validator=self.validator,
repr=False,
cmp=False,
hash=False,
init=False,
)
self.validator(inst, inner_attr, element)
Introduce set_of (similar to list_of validator)# Licenced under the txaws licence available at /LICENSE in the txaws source.
"""
attrs validators for internal use.
"""
import attr
from attr import validators
def list_of(validator):
"""
Require a value which is a list containing elements which the
given validator accepts.
"""
return _ContainerOf(list, validator)
def set_of(validator):
"""
Require a value which is a set containing elements which the given
validator accepts.
"""
return _ContainerOf(set, validator)
@attr.s(frozen=True)
class _ContainerOf(object):
"""
attrs validator for a container of objects which satisfy another
validator.
L{list_of}, L{set_of}, etc are the public constructors to hide the
type and prevent subclassing.
"""
container_type = attr.ib()
validator = attr.ib()
def __call__(self, inst, a, value):
validators.instance_of(self.container_type)(inst, a, value)
for n, element in enumerate(sorted(value)):
inner_identifier = u"sorted({})[{}]".format(a.name, n)
# Create an Attribute with a name that refers to the
# validator we're using and the index we're validating.
# Otherwise the validation failure is pretty confusing.
inner_attr = attr.Attribute(
name=inner_identifier,
default=None,
validator=self.validator,
repr=False,
cmp=False,
hash=False,
init=False,
)
self.validator(inst, inner_attr, element)
| <commit_before># Licenced under the txaws licence available at /LICENSE in the txaws source.
"""
attrs validators for internal use.
"""
import attr
from attr import validators
def list_of(validator):
"""
Require a value which is a list containing elements which the
given validator accepts.
"""
return _ListOf(validator)
@attr.s(frozen=True)
class _ListOf(object):
"""
attrs validator for a list of elements which satisfy another
validator.
L{list_of} is the public constructor to hide the type and prevent
subclassing.
"""
validator = attr.ib()
def __call__(self, inst, a, value):
validators.instance_of(list)(inst, a, value)
for n, element in enumerate(value):
inner_identifier = u"{}[{}]".format(a.name, n)
# Create an Attribute with a name that refers to the
# validator we're using and the index we're validating.
# Otherwise the validation failure is pretty confusing.
inner_attr = attr.Attribute(
name=inner_identifier,
default=None,
validator=self.validator,
repr=False,
cmp=False,
hash=False,
init=False,
)
self.validator(inst, inner_attr, element)
<commit_msg>Introduce set_of (similar to list_of validator)<commit_after># Licenced under the txaws licence available at /LICENSE in the txaws source.
"""
attrs validators for internal use.
"""
import attr
from attr import validators
def list_of(validator):
"""
Require a value which is a list containing elements which the
given validator accepts.
"""
return _ContainerOf(list, validator)
def set_of(validator):
"""
Require a value which is a set containing elements which the given
validator accepts.
"""
return _ContainerOf(set, validator)
@attr.s(frozen=True)
class _ContainerOf(object):
"""
attrs validator for a container of objects which satisfy another
validator.
L{list_of}, L{set_of}, etc are the public constructors to hide the
type and prevent subclassing.
"""
container_type = attr.ib()
validator = attr.ib()
def __call__(self, inst, a, value):
validators.instance_of(self.container_type)(inst, a, value)
for n, element in enumerate(sorted(value)):
inner_identifier = u"sorted({})[{}]".format(a.name, n)
# Create an Attribute with a name that refers to the
# validator we're using and the index we're validating.
# Otherwise the validation failure is pretty confusing.
inner_attr = attr.Attribute(
name=inner_identifier,
default=None,
validator=self.validator,
repr=False,
cmp=False,
hash=False,
init=False,
)
self.validator(inst, inner_attr, element)
|
6dab7ceeb4de601c47b4d370c6184ddcd0110e89 | doc/conf.py | doc/conf.py | # -*- coding: utf-8 -*-
import os
import sys
import sphinx_rtd_theme
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
| # -*- coding: utf-8 -*-
import os
import sys
import sphinx_rtd_theme
import pkg_resources
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = pkg_resources.require('sphinxcontrib-ros')[0].version
release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
| Use the source version as a doc version | Use the source version as a doc version
| Python | bsd-2-clause | otamachan/sphinxcontrib-ros,otamachan/sphinxcontrib-ros | # -*- coding: utf-8 -*-
import os
import sys
import sphinx_rtd_theme
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
Use the source version as a doc version | # -*- coding: utf-8 -*-
import os
import sys
import sphinx_rtd_theme
import pkg_resources
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = pkg_resources.require('sphinxcontrib-ros')[0].version
release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
| <commit_before># -*- coding: utf-8 -*-
import os
import sys
import sphinx_rtd_theme
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
<commit_msg>Use the source version as a doc version<commit_after> | # -*- coding: utf-8 -*-
import os
import sys
import sphinx_rtd_theme
import pkg_resources
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = pkg_resources.require('sphinxcontrib-ros')[0].version
release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
| # -*- coding: utf-8 -*-
import os
import sys
import sphinx_rtd_theme
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
Use the source version as a doc version# -*- coding: utf-8 -*-
import os
import sys
import sphinx_rtd_theme
import pkg_resources
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = pkg_resources.require('sphinxcontrib-ros')[0].version
release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
| <commit_before># -*- coding: utf-8 -*-
import os
import sys
import sphinx_rtd_theme
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
<commit_msg>Use the source version as a doc version<commit_after># -*- coding: utf-8 -*-
import os
import sys
import sphinx_rtd_theme
import pkg_resources
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'src'))
import sphinxcontrib; reload(sphinxcontrib)
extensions = ['sphinxcontrib.ros']
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'sphinxcontritb-ros'
copyright = u'2015, Tamaki Nishino'
version = pkg_resources.require('sphinxcontrib-ros')[0].version
release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
def setup(app):
app.add_description_unit('confval', 'confval',
'pair: %s; configuration value')
|
d5fd80a02ca619655f0b6d470acb745ec4432ba5 | e2e_test.py | e2e_test.py | # Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
assert(html == "James Joyce")
| # Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
print(html)
assert(html == "James Joyce")
| Add Print Statement For Easier Debugging | Add Print Statement For Easier Debugging
| Python | apache-2.0 | bshaffer/appengine-python-vm-hello,googlearchive/appengine-python-vm-hello,bshaffer/appengine-python-vm-hello,googlearchive/appengine-python-vm-hello | # Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
assert(html == "James Joyce")
Add Print Statement For Easier Debugging | # Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
print(html)
assert(html == "James Joyce")
| <commit_before># Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
assert(html == "James Joyce")
<commit_msg>Add Print Statement For Easier Debugging<commit_after> | # Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
print(html)
assert(html == "James Joyce")
| # Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
assert(html == "James Joyce")
Add Print Statement For Easier Debugging# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
print(html)
assert(html == "James Joyce")
| <commit_before># Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
assert(html == "James Joyce")
<commit_msg>Add Print Statement For Easier Debugging<commit_after># Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use
# this file except in compliance with the License. You may obtain a copy of the
# License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
# law or agreed to in writing, software distributed under the License is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and
# limitations under the License.
import urllib2
import logging
HOST='http://continuous-deployment-python.appspot.com'
response = urllib2.urlopen("{}/get_author/ulysses".format(HOST))
html = response.read()
print(html)
assert(html == "James Joyce")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.