commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
427a4b50934e4c4353d98851a33352961d05d051
|
backend/submissions/types.py
|
backend/submissions/types.py
|
import graphene
from graphene_django import DjangoObjectType
from voting.types import VoteType
from .models import Submission
from .models import SubmissionType as ModelSubmissionType
class SubmissionTypeType(DjangoObjectType):
class Meta:
model = ModelSubmissionType
only_fields = ("id", "name")
class SubmissionType(DjangoObjectType):
votes = graphene.NonNull(graphene.List(graphene.NonNull(VoteType)))
def resolve_votes(self, info):
return self.votes.all()
class Meta:
model = Submission
only_fields = (
"id",
"conference",
"title",
"elevator_pitch",
"notes",
"abstract",
"owner",
"helpers",
"topic",
"type",
"duration",
"votes",
)
|
import graphene
from graphene_django import DjangoObjectType
from voting.models import Vote
from voting.types import VoteType
from .models import Submission
from .models import SubmissionType as ModelSubmissionType
class SubmissionTypeType(DjangoObjectType):
class Meta:
model = ModelSubmissionType
only_fields = ("id", "name")
class SubmissionType(DjangoObjectType):
votes = graphene.NonNull(graphene.List(VoteType))
my_vote = graphene.Field(VoteType, user_id=graphene.ID())
def resolve_my_vote(self, info, user_id):
try:
return self.votes.get(user_id=user_id)
except Vote.DoesNotExist:
return None
def resolve_votes(self, info):
return self.votes.all()
class Meta:
model = Submission
only_fields = (
"id",
"conference",
"title",
"elevator_pitch",
"notes",
"abstract",
"owner",
"helpers",
"topic",
"type",
"duration",
"votes",
)
|
Add logged user vote field to SubmissionType
|
Add logged user vote field to SubmissionType
|
Python
|
mit
|
patrick91/pycon,patrick91/pycon
|
import graphene
from graphene_django import DjangoObjectType
from voting.types import VoteType
from .models import Submission
from .models import SubmissionType as ModelSubmissionType
class SubmissionTypeType(DjangoObjectType):
class Meta:
model = ModelSubmissionType
only_fields = ("id", "name")
class SubmissionType(DjangoObjectType):
votes = graphene.NonNull(graphene.List(graphene.NonNull(VoteType)))
def resolve_votes(self, info):
return self.votes.all()
class Meta:
model = Submission
only_fields = (
"id",
"conference",
"title",
"elevator_pitch",
"notes",
"abstract",
"owner",
"helpers",
"topic",
"type",
"duration",
"votes",
)
Add logged user vote field to SubmissionType
|
import graphene
from graphene_django import DjangoObjectType
from voting.models import Vote
from voting.types import VoteType
from .models import Submission
from .models import SubmissionType as ModelSubmissionType
class SubmissionTypeType(DjangoObjectType):
class Meta:
model = ModelSubmissionType
only_fields = ("id", "name")
class SubmissionType(DjangoObjectType):
votes = graphene.NonNull(graphene.List(VoteType))
my_vote = graphene.Field(VoteType, user_id=graphene.ID())
def resolve_my_vote(self, info, user_id):
try:
return self.votes.get(user_id=user_id)
except Vote.DoesNotExist:
return None
def resolve_votes(self, info):
return self.votes.all()
class Meta:
model = Submission
only_fields = (
"id",
"conference",
"title",
"elevator_pitch",
"notes",
"abstract",
"owner",
"helpers",
"topic",
"type",
"duration",
"votes",
)
|
<commit_before>import graphene
from graphene_django import DjangoObjectType
from voting.types import VoteType
from .models import Submission
from .models import SubmissionType as ModelSubmissionType
class SubmissionTypeType(DjangoObjectType):
class Meta:
model = ModelSubmissionType
only_fields = ("id", "name")
class SubmissionType(DjangoObjectType):
votes = graphene.NonNull(graphene.List(graphene.NonNull(VoteType)))
def resolve_votes(self, info):
return self.votes.all()
class Meta:
model = Submission
only_fields = (
"id",
"conference",
"title",
"elevator_pitch",
"notes",
"abstract",
"owner",
"helpers",
"topic",
"type",
"duration",
"votes",
)
<commit_msg>Add logged user vote field to SubmissionType<commit_after>
|
import graphene
from graphene_django import DjangoObjectType
from voting.models import Vote
from voting.types import VoteType
from .models import Submission
from .models import SubmissionType as ModelSubmissionType
class SubmissionTypeType(DjangoObjectType):
class Meta:
model = ModelSubmissionType
only_fields = ("id", "name")
class SubmissionType(DjangoObjectType):
votes = graphene.NonNull(graphene.List(VoteType))
my_vote = graphene.Field(VoteType, user_id=graphene.ID())
def resolve_my_vote(self, info, user_id):
try:
return self.votes.get(user_id=user_id)
except Vote.DoesNotExist:
return None
def resolve_votes(self, info):
return self.votes.all()
class Meta:
model = Submission
only_fields = (
"id",
"conference",
"title",
"elevator_pitch",
"notes",
"abstract",
"owner",
"helpers",
"topic",
"type",
"duration",
"votes",
)
|
import graphene
from graphene_django import DjangoObjectType
from voting.types import VoteType
from .models import Submission
from .models import SubmissionType as ModelSubmissionType
class SubmissionTypeType(DjangoObjectType):
class Meta:
model = ModelSubmissionType
only_fields = ("id", "name")
class SubmissionType(DjangoObjectType):
votes = graphene.NonNull(graphene.List(graphene.NonNull(VoteType)))
def resolve_votes(self, info):
return self.votes.all()
class Meta:
model = Submission
only_fields = (
"id",
"conference",
"title",
"elevator_pitch",
"notes",
"abstract",
"owner",
"helpers",
"topic",
"type",
"duration",
"votes",
)
Add logged user vote field to SubmissionTypeimport graphene
from graphene_django import DjangoObjectType
from voting.models import Vote
from voting.types import VoteType
from .models import Submission
from .models import SubmissionType as ModelSubmissionType
class SubmissionTypeType(DjangoObjectType):
class Meta:
model = ModelSubmissionType
only_fields = ("id", "name")
class SubmissionType(DjangoObjectType):
votes = graphene.NonNull(graphene.List(VoteType))
my_vote = graphene.Field(VoteType, user_id=graphene.ID())
def resolve_my_vote(self, info, user_id):
try:
return self.votes.get(user_id=user_id)
except Vote.DoesNotExist:
return None
def resolve_votes(self, info):
return self.votes.all()
class Meta:
model = Submission
only_fields = (
"id",
"conference",
"title",
"elevator_pitch",
"notes",
"abstract",
"owner",
"helpers",
"topic",
"type",
"duration",
"votes",
)
|
<commit_before>import graphene
from graphene_django import DjangoObjectType
from voting.types import VoteType
from .models import Submission
from .models import SubmissionType as ModelSubmissionType
class SubmissionTypeType(DjangoObjectType):
class Meta:
model = ModelSubmissionType
only_fields = ("id", "name")
class SubmissionType(DjangoObjectType):
votes = graphene.NonNull(graphene.List(graphene.NonNull(VoteType)))
def resolve_votes(self, info):
return self.votes.all()
class Meta:
model = Submission
only_fields = (
"id",
"conference",
"title",
"elevator_pitch",
"notes",
"abstract",
"owner",
"helpers",
"topic",
"type",
"duration",
"votes",
)
<commit_msg>Add logged user vote field to SubmissionType<commit_after>import graphene
from graphene_django import DjangoObjectType
from voting.models import Vote
from voting.types import VoteType
from .models import Submission
from .models import SubmissionType as ModelSubmissionType
class SubmissionTypeType(DjangoObjectType):
class Meta:
model = ModelSubmissionType
only_fields = ("id", "name")
class SubmissionType(DjangoObjectType):
votes = graphene.NonNull(graphene.List(VoteType))
my_vote = graphene.Field(VoteType, user_id=graphene.ID())
def resolve_my_vote(self, info, user_id):
try:
return self.votes.get(user_id=user_id)
except Vote.DoesNotExist:
return None
def resolve_votes(self, info):
return self.votes.all()
class Meta:
model = Submission
only_fields = (
"id",
"conference",
"title",
"elevator_pitch",
"notes",
"abstract",
"owner",
"helpers",
"topic",
"type",
"duration",
"votes",
)
|
93eb8e22b823c523833538cd93dfc2bf48f80854
|
mpfmc/_version.py
|
mpfmc/_version.py
|
__version__ = '0.33.0-dev.12'
__short_version__ = '0.33'
__bcp_version__ = '1.1'
__config_version__ = '4'
__mpf_version_required__ = '0.33.0-dev.15'
version = "MPF-MC v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
|
__version__ = '0.50.0-dev.1'
__short_version__ = '0.50'
__bcp_version__ = '1.1'
__config_version__ = '4'
__mpf_version_required__ = '0.50.0-dev.1'
version = "MPF-MC v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
|
Change dev version to 0.50.0-dev.1
|
Change dev version to 0.50.0-dev.1
|
Python
|
mit
|
missionpinball/mpf-mc,missionpinball/mpf-mc,missionpinball/mpf-mc
|
__version__ = '0.33.0-dev.12'
__short_version__ = '0.33'
__bcp_version__ = '1.1'
__config_version__ = '4'
__mpf_version_required__ = '0.33.0-dev.15'
version = "MPF-MC v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
Change dev version to 0.50.0-dev.1
|
__version__ = '0.50.0-dev.1'
__short_version__ = '0.50'
__bcp_version__ = '1.1'
__config_version__ = '4'
__mpf_version_required__ = '0.50.0-dev.1'
version = "MPF-MC v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
|
<commit_before>__version__ = '0.33.0-dev.12'
__short_version__ = '0.33'
__bcp_version__ = '1.1'
__config_version__ = '4'
__mpf_version_required__ = '0.33.0-dev.15'
version = "MPF-MC v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
<commit_msg>Change dev version to 0.50.0-dev.1<commit_after>
|
__version__ = '0.50.0-dev.1'
__short_version__ = '0.50'
__bcp_version__ = '1.1'
__config_version__ = '4'
__mpf_version_required__ = '0.50.0-dev.1'
version = "MPF-MC v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
|
__version__ = '0.33.0-dev.12'
__short_version__ = '0.33'
__bcp_version__ = '1.1'
__config_version__ = '4'
__mpf_version_required__ = '0.33.0-dev.15'
version = "MPF-MC v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
Change dev version to 0.50.0-dev.1__version__ = '0.50.0-dev.1'
__short_version__ = '0.50'
__bcp_version__ = '1.1'
__config_version__ = '4'
__mpf_version_required__ = '0.50.0-dev.1'
version = "MPF-MC v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
|
<commit_before>__version__ = '0.33.0-dev.12'
__short_version__ = '0.33'
__bcp_version__ = '1.1'
__config_version__ = '4'
__mpf_version_required__ = '0.33.0-dev.15'
version = "MPF-MC v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
<commit_msg>Change dev version to 0.50.0-dev.1<commit_after>__version__ = '0.50.0-dev.1'
__short_version__ = '0.50'
__bcp_version__ = '1.1'
__config_version__ = '4'
__mpf_version_required__ = '0.50.0-dev.1'
version = "MPF-MC v{} (config_version={}, BCP v{}, Requires MPF v{})".format(
__version__, __config_version__, __bcp_version__, __mpf_version_required__)
|
ab7a8335bae22bae6f729fc9805810c0c8925703
|
isitbullshit/__init__.py
|
isitbullshit/__init__.py
|
# -*- coding: utf-8 -*-
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
|
# -*- coding: utf-8 -*-
__author__ = "Sergey Arkhipov <serge@aerialsounds.org>"
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
|
Add myself to the module
|
Add myself to the module
|
Python
|
mit
|
9seconds/isitbullshit
|
# -*- coding: utf-8 -*-
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
Add myself to the module
|
# -*- coding: utf-8 -*-
__author__ = "Sergey Arkhipov <serge@aerialsounds.org>"
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
|
<commit_before># -*- coding: utf-8 -*-
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
<commit_msg>Add myself to the module<commit_after>
|
# -*- coding: utf-8 -*-
__author__ = "Sergey Arkhipov <serge@aerialsounds.org>"
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
|
# -*- coding: utf-8 -*-
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
Add myself to the module# -*- coding: utf-8 -*-
__author__ = "Sergey Arkhipov <serge@aerialsounds.org>"
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
|
<commit_before># -*- coding: utf-8 -*-
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
<commit_msg>Add myself to the module<commit_after># -*- coding: utf-8 -*-
__author__ = "Sergey Arkhipov <serge@aerialsounds.org>"
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
|
88a6708061ccdc7d3ac4d031c48de44039937b54
|
frontends/etiquette_flask/etiquette_flask_entrypoint.py
|
frontends/etiquette_flask/etiquette_flask_entrypoint.py
|
'''
This file is the WSGI entrypoint for remote / production use.
If you are using Gunicorn, for example:
gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-"
'''
import werkzeug.contrib.fixers
import backend
backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app)
site = backend.site
|
'''
This file is the WSGI entrypoint for remote / production use.
If you are using Gunicorn, for example:
gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-"
'''
import werkzeug.middleware.proxy_fix
import backend
backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app)
site = backend.site
|
Replace werkzeug.contrib with werkzeug.middleware proxyfix.
|
Replace werkzeug.contrib with werkzeug.middleware proxyfix.
werkzeug.contrib has been deprecated, this is the new location
of the proxyfix.
|
Python
|
bsd-3-clause
|
voussoir/etiquette,voussoir/etiquette,voussoir/etiquette
|
'''
This file is the WSGI entrypoint for remote / production use.
If you are using Gunicorn, for example:
gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-"
'''
import werkzeug.contrib.fixers
import backend
backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app)
site = backend.site
Replace werkzeug.contrib with werkzeug.middleware proxyfix.
werkzeug.contrib has been deprecated, this is the new location
of the proxyfix.
|
'''
This file is the WSGI entrypoint for remote / production use.
If you are using Gunicorn, for example:
gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-"
'''
import werkzeug.middleware.proxy_fix
import backend
backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app)
site = backend.site
|
<commit_before>'''
This file is the WSGI entrypoint for remote / production use.
If you are using Gunicorn, for example:
gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-"
'''
import werkzeug.contrib.fixers
import backend
backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app)
site = backend.site
<commit_msg>Replace werkzeug.contrib with werkzeug.middleware proxyfix.
werkzeug.contrib has been deprecated, this is the new location
of the proxyfix.<commit_after>
|
'''
This file is the WSGI entrypoint for remote / production use.
If you are using Gunicorn, for example:
gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-"
'''
import werkzeug.middleware.proxy_fix
import backend
backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app)
site = backend.site
|
'''
This file is the WSGI entrypoint for remote / production use.
If you are using Gunicorn, for example:
gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-"
'''
import werkzeug.contrib.fixers
import backend
backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app)
site = backend.site
Replace werkzeug.contrib with werkzeug.middleware proxyfix.
werkzeug.contrib has been deprecated, this is the new location
of the proxyfix.'''
This file is the WSGI entrypoint for remote / production use.
If you are using Gunicorn, for example:
gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-"
'''
import werkzeug.middleware.proxy_fix
import backend
backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app)
site = backend.site
|
<commit_before>'''
This file is the WSGI entrypoint for remote / production use.
If you are using Gunicorn, for example:
gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-"
'''
import werkzeug.contrib.fixers
import backend
backend.site.wsgi_app = werkzeug.contrib.fixers.ProxyFix(backend.site.wsgi_app)
site = backend.site
<commit_msg>Replace werkzeug.contrib with werkzeug.middleware proxyfix.
werkzeug.contrib has been deprecated, this is the new location
of the proxyfix.<commit_after>'''
This file is the WSGI entrypoint for remote / production use.
If you are using Gunicorn, for example:
gunicorn etiquette_flask_entrypoint:site --bind "0.0.0.0:PORT" --access-logfile "-"
'''
import werkzeug.middleware.proxy_fix
import backend
backend.site.wsgi_app = werkzeug.middleware.proxy_fix.ProxyFix(backend.site.wsgi_app)
site = backend.site
|
1b8afd8a7bbc832e1394cbca29624e24d5e7d062
|
data_structures/linked_list.py
|
data_structures/linked_list.py
|
class Node(object):
def __init__(self, val=None, pointer=None):
self.val = val
self.pointer = pointer
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
|
Add basic structure and methods for linked list
|
Add basic structure and methods for linked list
|
Python
|
mit
|
sjschmidt44/python_data_structures
|
Add basic structure and methods for linked list
|
class Node(object):
def __init__(self, val=None, pointer=None):
self.val = val
self.pointer = pointer
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
|
<commit_before><commit_msg>Add basic structure and methods for linked list<commit_after>
|
class Node(object):
def __init__(self, val=None, pointer=None):
self.val = val
self.pointer = pointer
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
|
Add basic structure and methods for linked listclass Node(object):
def __init__(self, val=None, pointer=None):
self.val = val
self.pointer = pointer
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
|
<commit_before><commit_msg>Add basic structure and methods for linked list<commit_after>class Node(object):
def __init__(self, val=None, pointer=None):
self.val = val
self.pointer = pointer
class LinkedList(object):
def __init__(self, values=None, head=None):
self.head = head
self.length = 0
def size(self):
pass
def search(self):
pass
def display(self):
pass
def remove(self):
pass
def insert(self):
pass
def pop(self):
pass
|
|
601962d1a34a00c79b0e56b302a17b5673eb8168
|
etcd3/__init__.py
|
etcd3/__init__.py
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
)
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'etcdrpc',
'Etcd3Client',
'Transactions',
'client',
'Lease',
'Lock',
'Member',
)
|
Reorder '__all__' entries to respect import order
|
Reorder '__all__' entries to respect import order
|
Python
|
apache-2.0
|
kragniz/python-etcd3
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
)
Reorder '__all__' entries to respect import order
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'etcdrpc',
'Etcd3Client',
'Transactions',
'client',
'Lease',
'Lock',
'Member',
)
|
<commit_before>from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
)
<commit_msg>Reorder '__all__' entries to respect import order<commit_after>
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'etcdrpc',
'Etcd3Client',
'Transactions',
'client',
'Lease',
'Lock',
'Member',
)
|
from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
)
Reorder '__all__' entries to respect import orderfrom __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'etcdrpc',
'Etcd3Client',
'Transactions',
'client',
'Lease',
'Lock',
'Member',
)
|
<commit_before>from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'Etcd3Client',
'Lease',
'Lock',
'Member',
'Transactions',
'client',
'etcdrpc',
)
<commit_msg>Reorder '__all__' entries to respect import order<commit_after>from __future__ import absolute_import
import etcd3.etcdrpc as etcdrpc
from etcd3.client import Etcd3Client
from etcd3.client import Transactions
from etcd3.client import client
from etcd3.leases import Lease
from etcd3.locks import Lock
from etcd3.members import Member
__author__ = 'Louis Taylor'
__email__ = 'louis@kragniz.eu'
__version__ = '0.6.0'
__all__ = (
'etcdrpc',
'Etcd3Client',
'Transactions',
'client',
'Lease',
'Lock',
'Member',
)
|
ee441c445bf8a9401af045993ed4bd5c65db9eff
|
garnish/utils.py
|
garnish/utils.py
|
import sys
def fill_template(temp, args, longname, filename, url):
"""
Takes a template string (temp) and replaces all template keywords with
information from commandline arguments.
"""
temp = temp.replace('OWNER_NAME', args.copyright_holder)
temp = temp.replace('COPYRIGHT_YEAR', args.year)
temp = temp.replace('PROGRAM_NAME', args.program_name)
temp = temp.replace('LICENSE_LONGNAME', longname)
temp = temp.replace('LICENSE_FILENAME', filename)
temp = temp.replace('LICENSE_URL', url)
return temp
def exit(bad=False):
if bad:
print 'The operation was not completed successfully.'
sys.exit(1)
else:
sys.exit(0)
|
import sys
import textwrap
def fill_template(temp, args, longname, filename, url):
"""
Takes a template string (temp) and replaces all template keywords with
information from commandline arguments.
"""
temp = temp.replace('OWNER_NAME', args.copyright_holder)
temp = temp.replace('COPYRIGHT_YEAR', args.year)
temp = temp.replace('PROGRAM_NAME', args.program_name)
temp = temp.replace('LICENSE_LONGNAME', longname)
temp = temp.replace('LICENSE_FILENAME', filename)
temp = temp.replace('LICENSE_URL', url)
return temp
def exit(bad=False):
if bad:
print 'The operation was not completed successfully.'
sys.exit(1)
else:
sys.exit(0)
def wrap_paragraphs(readlines_list, textwidth=80):
""" Takes a list of strings called readlines_list and returns a single
string with lines wrapped to textwidth columns. readlines_list should
follow the format produced by calls to open().readlines(). """
def split_paragraph(readlines_list):
""" Transform the readlines_list into a nested list. Each list in the
output represents the lines of an unwrapped or improperly wrapped
paragraph. """
list_of_lists = []
para_list = []
for line in readlines_list:
if line == '\n':
list_of_lists.append(para_list)
para_list = []
else:
para_list.append(line)
list_of_lists.append(para_list)
return list_of_lists
paragraph_list = split_paragraph(readlines_list)
wrapped_list = []
for para in paragraph_list:
newlines = textwrap.wrap(''.join(para),textwidth)
wrapped_list.extend(newlines)
wrapped_list.append('\n') # Separate paragraphs
return '\n'.join(wrapped_list)
|
Add function that extends textwrap.wrap to multi-paragraph inputs.
|
Add function that extends textwrap.wrap to multi-paragraph inputs.
|
Python
|
mit
|
radicalbiscuit/garnish
|
import sys
def fill_template(temp, args, longname, filename, url):
"""
Takes a template string (temp) and replaces all template keywords with
information from commandline arguments.
"""
temp = temp.replace('OWNER_NAME', args.copyright_holder)
temp = temp.replace('COPYRIGHT_YEAR', args.year)
temp = temp.replace('PROGRAM_NAME', args.program_name)
temp = temp.replace('LICENSE_LONGNAME', longname)
temp = temp.replace('LICENSE_FILENAME', filename)
temp = temp.replace('LICENSE_URL', url)
return temp
def exit(bad=False):
if bad:
print 'The operation was not completed successfully.'
sys.exit(1)
else:
sys.exit(0)
Add function that extends textwrap.wrap to multi-paragraph inputs.
|
import sys
import textwrap
def fill_template(temp, args, longname, filename, url):
"""
Takes a template string (temp) and replaces all template keywords with
information from commandline arguments.
"""
temp = temp.replace('OWNER_NAME', args.copyright_holder)
temp = temp.replace('COPYRIGHT_YEAR', args.year)
temp = temp.replace('PROGRAM_NAME', args.program_name)
temp = temp.replace('LICENSE_LONGNAME', longname)
temp = temp.replace('LICENSE_FILENAME', filename)
temp = temp.replace('LICENSE_URL', url)
return temp
def exit(bad=False):
if bad:
print 'The operation was not completed successfully.'
sys.exit(1)
else:
sys.exit(0)
def wrap_paragraphs(readlines_list, textwidth=80):
    """Wrap a readlines()-style list of lines to *textwidth* columns.

    *readlines_list* follows the format produced by open().readlines():
    each element is one line, and paragraphs are separated by bare '\n'
    elements.  Each paragraph is re-wrapped with textwrap.wrap and the
    paragraphs are joined back together separated by blank lines.
    Returns a single string.
    """
    # Group the incoming lines into paragraphs: a bare '\n' element
    # terminates the current paragraph (possibly producing an empty one).
    paragraphs = []
    current = []
    for raw_line in readlines_list:
        if raw_line == '\n':
            paragraphs.append(current)
            current = []
        else:
            current.append(raw_line)
    paragraphs.append(current)
    # Re-wrap each paragraph and append a '\n' entry as a separator,
    # exactly mirroring the original output (including trailing newlines).
    pieces = []
    for paragraph in paragraphs:
        pieces.extend(textwrap.wrap(''.join(paragraph), textwidth))
        pieces.append('\n')
    return '\n'.join(pieces)
|
<commit_before>import sys
def fill_template(temp, args, longname, filename, url):
"""
Takes a template string (temp) and replaces all template keywords with
information from commandline arguments.
"""
temp = temp.replace('OWNER_NAME', args.copyright_holder)
temp = temp.replace('COPYRIGHT_YEAR', args.year)
temp = temp.replace('PROGRAM_NAME', args.program_name)
temp = temp.replace('LICENSE_LONGNAME', longname)
temp = temp.replace('LICENSE_FILENAME', filename)
temp = temp.replace('LICENSE_URL', url)
return temp
def exit(bad=False):
if bad:
print 'The operation was not completed successfully.'
sys.exit(1)
else:
sys.exit(0)
<commit_msg>Add function that extends textwrap.wrap to multi-paragraph inputs.<commit_after>
|
import sys
import textwrap
def fill_template(temp, args, longname, filename, url):
"""
Takes a template string (temp) and replaces all template keywords with
information from commandline arguments.
"""
temp = temp.replace('OWNER_NAME', args.copyright_holder)
temp = temp.replace('COPYRIGHT_YEAR', args.year)
temp = temp.replace('PROGRAM_NAME', args.program_name)
temp = temp.replace('LICENSE_LONGNAME', longname)
temp = temp.replace('LICENSE_FILENAME', filename)
temp = temp.replace('LICENSE_URL', url)
return temp
def exit(bad=False):
if bad:
print 'The operation was not completed successfully.'
sys.exit(1)
else:
sys.exit(0)
def wrap_paragraphs(readlines_list, textwidth=80):
""" Takes a list of strings called readlines_list and returns a single
string with lines wrapped to textwidth columns. readlines_list should
follow the format produced by calls to open().readlines(). """
def split_paragraph(readlines_list):
""" Transform the readlines_list into a nested list. Each list in the
output represents the lines of an unwrapped or improperly wrapped
paragraph. """
list_of_lists = []
para_list = []
for line in readlines_list:
if line == '\n':
list_of_lists.append(para_list)
para_list = []
else:
para_list.append(line)
list_of_lists.append(para_list)
return list_of_lists
paragraph_list = split_paragraph(readlines_list)
wrapped_list = []
for para in paragraph_list:
newlines = textwrap.wrap(''.join(para),textwidth)
wrapped_list.extend(newlines)
wrapped_list.append('\n') # Separate paragraphs
return '\n'.join(wrapped_list)
|
import sys
def fill_template(temp, args, longname, filename, url):
"""
Takes a template string (temp) and replaces all template keywords with
information from commandline arguments.
"""
temp = temp.replace('OWNER_NAME', args.copyright_holder)
temp = temp.replace('COPYRIGHT_YEAR', args.year)
temp = temp.replace('PROGRAM_NAME', args.program_name)
temp = temp.replace('LICENSE_LONGNAME', longname)
temp = temp.replace('LICENSE_FILENAME', filename)
temp = temp.replace('LICENSE_URL', url)
return temp
def exit(bad=False):
if bad:
print 'The operation was not completed successfully.'
sys.exit(1)
else:
sys.exit(0)
Add function that extends textwrap.wrap to multi-paragraph inputs.import sys
import textwrap
def fill_template(temp, args, longname, filename, url):
"""
Takes a template string (temp) and replaces all template keywords with
information from commandline arguments.
"""
temp = temp.replace('OWNER_NAME', args.copyright_holder)
temp = temp.replace('COPYRIGHT_YEAR', args.year)
temp = temp.replace('PROGRAM_NAME', args.program_name)
temp = temp.replace('LICENSE_LONGNAME', longname)
temp = temp.replace('LICENSE_FILENAME', filename)
temp = temp.replace('LICENSE_URL', url)
return temp
def exit(bad=False):
if bad:
print 'The operation was not completed successfully.'
sys.exit(1)
else:
sys.exit(0)
def wrap_paragraphs(readlines_list, textwidth=80):
""" Takes a list of strings called readlines_list and returns a single
string with lines wrapped to textwidth columns. readlines_list should
follow the format produced by calls to open().readlines(). """
def split_paragraph(readlines_list):
""" Transform the readlines_list into a nested list. Each list in the
output represents the lines of an unwrapped or improperly wrapped
paragraph. """
list_of_lists = []
para_list = []
for line in readlines_list:
if line == '\n':
list_of_lists.append(para_list)
para_list = []
else:
para_list.append(line)
list_of_lists.append(para_list)
return list_of_lists
paragraph_list = split_paragraph(readlines_list)
wrapped_list = []
for para in paragraph_list:
newlines = textwrap.wrap(''.join(para),textwidth)
wrapped_list.extend(newlines)
wrapped_list.append('\n') # Separate paragraphs
return '\n'.join(wrapped_list)
|
<commit_before>import sys
def fill_template(temp, args, longname, filename, url):
"""
Takes a template string (temp) and replaces all template keywords with
information from commandline arguments.
"""
temp = temp.replace('OWNER_NAME', args.copyright_holder)
temp = temp.replace('COPYRIGHT_YEAR', args.year)
temp = temp.replace('PROGRAM_NAME', args.program_name)
temp = temp.replace('LICENSE_LONGNAME', longname)
temp = temp.replace('LICENSE_FILENAME', filename)
temp = temp.replace('LICENSE_URL', url)
return temp
def exit(bad=False):
if bad:
print 'The operation was not completed successfully.'
sys.exit(1)
else:
sys.exit(0)
<commit_msg>Add function that extends textwrap.wrap to multi-paragraph inputs.<commit_after>import sys
import textwrap
def fill_template(temp, args, longname, filename, url):
"""
Takes a template string (temp) and replaces all template keywords with
information from commandline arguments.
"""
temp = temp.replace('OWNER_NAME', args.copyright_holder)
temp = temp.replace('COPYRIGHT_YEAR', args.year)
temp = temp.replace('PROGRAM_NAME', args.program_name)
temp = temp.replace('LICENSE_LONGNAME', longname)
temp = temp.replace('LICENSE_FILENAME', filename)
temp = temp.replace('LICENSE_URL', url)
return temp
def exit(bad=False):
if bad:
print 'The operation was not completed successfully.'
sys.exit(1)
else:
sys.exit(0)
def wrap_paragraphs(readlines_list, textwidth=80):
""" Takes a list of strings called readlines_list and returns a single
string with lines wrapped to textwidth columns. readlines_list should
follow the format produced by calls to open().readlines(). """
def split_paragraph(readlines_list):
""" Transform the readlines_list into a nested list. Each list in the
output represents the lines of an unwrapped or improperly wrapped
paragraph. """
list_of_lists = []
para_list = []
for line in readlines_list:
if line == '\n':
list_of_lists.append(para_list)
para_list = []
else:
para_list.append(line)
list_of_lists.append(para_list)
return list_of_lists
paragraph_list = split_paragraph(readlines_list)
wrapped_list = []
for para in paragraph_list:
newlines = textwrap.wrap(''.join(para),textwidth)
wrapped_list.extend(newlines)
wrapped_list.append('\n') # Separate paragraphs
return '\n'.join(wrapped_list)
|
dca788c815fb4375258f7ec0ec85af1c86aab70d
|
datastore/tasks.py
|
datastore/tasks.py
|
from __future__ import absolute_import
import logging
import traceback
from celery import shared_task
from celery.utils.log import get_task_logger
from datastore.models import ProjectRun
logger = get_task_logger(__name__)
@shared_task
def execute_project_run(project_run_pk):
    """Execute the meter for a ProjectRun and persist the outcome.

    Looks up the ProjectRun by primary key, marks it RUNNING, runs the
    owning project's meter, then saves a final status of SUCCESS or
    FAILED (with the formatted traceback stored on the run on failure).
    Returns None; invalid primary keys are logged and ignored.
    """
    try:
        project_run = ProjectRun.objects.get(pk=project_run_pk)
    except ProjectRun.DoesNotExist:
        logger.info("Received an invalid project_run_pk %s" % project_run_pk)
        return
    # Persist the transition immediately so observers can see progress.
    project_run.status = 'RUNNING'
    project_run.save()
    project = project_run.project
    logger.info(
        "Running {} meter for project {}"
        .format(project_run.meter_class, project.pk)
    )
    try:
        project.run_meter(meter_class=project_run.meter_class,
                          meter_settings=project_run.meter_settings)
        project_run.status = 'SUCCESS'
        logger.info(
            "Successfully ran {} meter for project {}"
            .format(project_run.meter_class, project.pk)
        )
    except Exception:
        # Catch Exception rather than a bare except so that
        # KeyboardInterrupt/SystemExit still propagate.
        tb = traceback.format_exc()
        project_run.status = 'FAILED'
        project_run.traceback = tb
        # logger.exception logs the message together with the active
        # traceback; the previous logging.error(traceback.print_exc())
        # logged the literal string "None" (print_exc returns None).
        logger.exception(
            "Failed running {} meter for project {}"
            .format(project_run.meter_class, project.pk)
        )
    project_run.save()
|
from __future__ import absolute_import
import logging
import traceback
from celery import shared_task
from celery.utils.log import get_task_logger
from datastore.models import ProjectRun
logger = get_task_logger(__name__)
@shared_task
def execute_project_run(project_run_pk):
try:
project_run = ProjectRun.objects.get(pk=project_run_pk)
except ProjectRun.DoesNotExist:
logger.info("Received an invalid project_run_pk %s" % project_run_pk)
return
project_run.status = 'RUNNING'
project_run.save()
project = project_run.project
logger.info(
"Running {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
try:
project.run_meter(meter_class=project_run.meter_class,
meter_settings=project_run.meter_settings,
project_run=project_run)
project_run.status = 'SUCCESS'
logger.info(
"Successfully ran {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
except:
tb = traceback.format_exc()
project_run.status = 'FAILED'
project_run.traceback = tb
logger.info(
"Failed running {} meter for project {}:\n{}"
.format(project_run.meter_class, project.pk, tb)
)
logging.error(traceback.print_exc())
project_run.save()
|
Add project run to project result.
|
Add project run to project result.
|
Python
|
mit
|
impactlab/oeem-energy-datastore,impactlab/oeem-energy-datastore,impactlab/oeem-energy-datastore
|
from __future__ import absolute_import
import logging
import traceback
from celery import shared_task
from celery.utils.log import get_task_logger
from datastore.models import ProjectRun
logger = get_task_logger(__name__)
@shared_task
def execute_project_run(project_run_pk):
try:
project_run = ProjectRun.objects.get(pk=project_run_pk)
except ProjectRun.DoesNotExist:
logger.info("Received an invalid project_run_pk %s" % project_run_pk)
return
project_run.status = 'RUNNING'
project_run.save()
project = project_run.project
logger.info(
"Running {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
try:
project.run_meter(meter_class=project_run.meter_class,
meter_settings=project_run.meter_settings)
project_run.status = 'SUCCESS'
logger.info(
"Successfully ran {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
except:
tb = traceback.format_exc()
project_run.status = 'FAILED'
project_run.traceback = tb
logger.info(
"Failed running {} meter for project {}:\n{}"
.format(project_run.meter_class, project.pk, tb)
)
logging.error(traceback.print_exc())
project_run.save()
Add project run to project result.
|
from __future__ import absolute_import
import logging
import traceback
from celery import shared_task
from celery.utils.log import get_task_logger
from datastore.models import ProjectRun
logger = get_task_logger(__name__)
@shared_task
def execute_project_run(project_run_pk):
try:
project_run = ProjectRun.objects.get(pk=project_run_pk)
except ProjectRun.DoesNotExist:
logger.info("Received an invalid project_run_pk %s" % project_run_pk)
return
project_run.status = 'RUNNING'
project_run.save()
project = project_run.project
logger.info(
"Running {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
try:
project.run_meter(meter_class=project_run.meter_class,
meter_settings=project_run.meter_settings,
project_run=project_run)
project_run.status = 'SUCCESS'
logger.info(
"Successfully ran {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
except:
tb = traceback.format_exc()
project_run.status = 'FAILED'
project_run.traceback = tb
logger.info(
"Failed running {} meter for project {}:\n{}"
.format(project_run.meter_class, project.pk, tb)
)
logging.error(traceback.print_exc())
project_run.save()
|
<commit_before>from __future__ import absolute_import
import logging
import traceback
from celery import shared_task
from celery.utils.log import get_task_logger
from datastore.models import ProjectRun
logger = get_task_logger(__name__)
@shared_task
def execute_project_run(project_run_pk):
try:
project_run = ProjectRun.objects.get(pk=project_run_pk)
except ProjectRun.DoesNotExist:
logger.info("Received an invalid project_run_pk %s" % project_run_pk)
return
project_run.status = 'RUNNING'
project_run.save()
project = project_run.project
logger.info(
"Running {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
try:
project.run_meter(meter_class=project_run.meter_class,
meter_settings=project_run.meter_settings)
project_run.status = 'SUCCESS'
logger.info(
"Successfully ran {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
except:
tb = traceback.format_exc()
project_run.status = 'FAILED'
project_run.traceback = tb
logger.info(
"Failed running {} meter for project {}:\n{}"
.format(project_run.meter_class, project.pk, tb)
)
logging.error(traceback.print_exc())
project_run.save()
<commit_msg>Add project run to project result.<commit_after>
|
from __future__ import absolute_import
import logging
import traceback
from celery import shared_task
from celery.utils.log import get_task_logger
from datastore.models import ProjectRun
logger = get_task_logger(__name__)
@shared_task
def execute_project_run(project_run_pk):
try:
project_run = ProjectRun.objects.get(pk=project_run_pk)
except ProjectRun.DoesNotExist:
logger.info("Received an invalid project_run_pk %s" % project_run_pk)
return
project_run.status = 'RUNNING'
project_run.save()
project = project_run.project
logger.info(
"Running {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
try:
project.run_meter(meter_class=project_run.meter_class,
meter_settings=project_run.meter_settings,
project_run=project_run)
project_run.status = 'SUCCESS'
logger.info(
"Successfully ran {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
except:
tb = traceback.format_exc()
project_run.status = 'FAILED'
project_run.traceback = tb
logger.info(
"Failed running {} meter for project {}:\n{}"
.format(project_run.meter_class, project.pk, tb)
)
logging.error(traceback.print_exc())
project_run.save()
|
from __future__ import absolute_import
import logging
import traceback
from celery import shared_task
from celery.utils.log import get_task_logger
from datastore.models import ProjectRun
logger = get_task_logger(__name__)
@shared_task
def execute_project_run(project_run_pk):
try:
project_run = ProjectRun.objects.get(pk=project_run_pk)
except ProjectRun.DoesNotExist:
logger.info("Received an invalid project_run_pk %s" % project_run_pk)
return
project_run.status = 'RUNNING'
project_run.save()
project = project_run.project
logger.info(
"Running {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
try:
project.run_meter(meter_class=project_run.meter_class,
meter_settings=project_run.meter_settings)
project_run.status = 'SUCCESS'
logger.info(
"Successfully ran {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
except:
tb = traceback.format_exc()
project_run.status = 'FAILED'
project_run.traceback = tb
logger.info(
"Failed running {} meter for project {}:\n{}"
.format(project_run.meter_class, project.pk, tb)
)
logging.error(traceback.print_exc())
project_run.save()
Add project run to project result.from __future__ import absolute_import
import logging
import traceback
from celery import shared_task
from celery.utils.log import get_task_logger
from datastore.models import ProjectRun
logger = get_task_logger(__name__)
@shared_task
def execute_project_run(project_run_pk):
try:
project_run = ProjectRun.objects.get(pk=project_run_pk)
except ProjectRun.DoesNotExist:
logger.info("Received an invalid project_run_pk %s" % project_run_pk)
return
project_run.status = 'RUNNING'
project_run.save()
project = project_run.project
logger.info(
"Running {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
try:
project.run_meter(meter_class=project_run.meter_class,
meter_settings=project_run.meter_settings,
project_run=project_run)
project_run.status = 'SUCCESS'
logger.info(
"Successfully ran {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
except:
tb = traceback.format_exc()
project_run.status = 'FAILED'
project_run.traceback = tb
logger.info(
"Failed running {} meter for project {}:\n{}"
.format(project_run.meter_class, project.pk, tb)
)
logging.error(traceback.print_exc())
project_run.save()
|
<commit_before>from __future__ import absolute_import
import logging
import traceback
from celery import shared_task
from celery.utils.log import get_task_logger
from datastore.models import ProjectRun
logger = get_task_logger(__name__)
@shared_task
def execute_project_run(project_run_pk):
try:
project_run = ProjectRun.objects.get(pk=project_run_pk)
except ProjectRun.DoesNotExist:
logger.info("Received an invalid project_run_pk %s" % project_run_pk)
return
project_run.status = 'RUNNING'
project_run.save()
project = project_run.project
logger.info(
"Running {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
try:
project.run_meter(meter_class=project_run.meter_class,
meter_settings=project_run.meter_settings)
project_run.status = 'SUCCESS'
logger.info(
"Successfully ran {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
except:
tb = traceback.format_exc()
project_run.status = 'FAILED'
project_run.traceback = tb
logger.info(
"Failed running {} meter for project {}:\n{}"
.format(project_run.meter_class, project.pk, tb)
)
logging.error(traceback.print_exc())
project_run.save()
<commit_msg>Add project run to project result.<commit_after>from __future__ import absolute_import
import logging
import traceback
from celery import shared_task
from celery.utils.log import get_task_logger
from datastore.models import ProjectRun
logger = get_task_logger(__name__)
@shared_task
def execute_project_run(project_run_pk):
try:
project_run = ProjectRun.objects.get(pk=project_run_pk)
except ProjectRun.DoesNotExist:
logger.info("Received an invalid project_run_pk %s" % project_run_pk)
return
project_run.status = 'RUNNING'
project_run.save()
project = project_run.project
logger.info(
"Running {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
try:
project.run_meter(meter_class=project_run.meter_class,
meter_settings=project_run.meter_settings,
project_run=project_run)
project_run.status = 'SUCCESS'
logger.info(
"Successfully ran {} meter for project {}"
.format(project_run.meter_class, project.pk)
)
except:
tb = traceback.format_exc()
project_run.status = 'FAILED'
project_run.traceback = tb
logger.info(
"Failed running {} meter for project {}:\n{}"
.format(project_run.meter_class, project.pk, tb)
)
logging.error(traceback.print_exc())
project_run.save()
|
98ba04f92d5f95c363bf89c0bb937463a6f95eab
|
tests/test_main.py
|
tests/test_main.py
|
import asyncio
import sys
import pytest
pytestmark = pytest.mark.asyncio()
async def test_command_line_send(smtpd_server, hostname, port):
    """Drive the ``python -m aiosmtplib`` CLI end to end through pipes.

    Feeds every interactive answer at once via Process.communicate(),
    which writes stdin and drains stdout concurrently.  The previous
    interleaved readuntil()/write() loop could deadlock once either pipe
    buffer filled, because neither side was being drained while the test
    waited on the other.
    """
    proc = await asyncio.create_subprocess_exec(
        sys.executable,
        b"-m",
        b"aiosmtplib",
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE,
    )
    answers = (
        bytes(hostname, "ascii"),
        bytes(str(port), "ascii"),
        b"sender@example.com",
        b"recipient@example.com",
        b"Subject: Hello World\n\nHi there.",
    )
    output, errors = await proc.communicate(input=b"\n".join(answers))
    assert errors is None
    # Each prompt should appear somewhere in the collected output.
    for prompt_fragment in (b"hostname", b"port", b"From", b"To", b"message"):
        assert prompt_fragment in output
    assert proc.returncode == 0
|
import asyncio
import sys
import pytest
pytestmark = pytest.mark.asyncio()
async def test_command_line_send(smtpd_server, hostname, port):
proc = await asyncio.create_subprocess_exec(
sys.executable,
b"-m",
b"aiosmtplib",
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
)
inputs = (
bytes(hostname, "ascii"),
bytes(str(port), "ascii"),
b"sender@example.com",
b"recipient@example.com",
b"Subject: Hello World\n\nHi there.",
)
messages = (
b"SMTP server hostname [localhost]:",
b"SMTP server port [25]:",
b"From:",
b"To:",
b"Enter message, end with ^D:",
)
output, errors = await proc.communicate(input=b"\n".join(inputs))
assert errors is None
for message in messages:
assert message in output
assert proc.returncode == 0
|
Fix deadlock in subprocess pipes
|
Fix deadlock in subprocess pipes
|
Python
|
mit
|
cole/aiosmtplib
|
import asyncio
import sys
import pytest
pytestmark = pytest.mark.asyncio()
async def test_command_line_send(smtpd_server, hostname, port):
proc = await asyncio.create_subprocess_exec(
sys.executable,
b"-m",
b"aiosmtplib",
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
)
expected = (
(b"hostname", bytes(hostname, "ascii")),
(b"port", bytes(str(port), "ascii")),
(b"From", b"sender@example.com"),
(b"To", b"recipient@example.com"),
(b"message", b"Subject: Hello World\n\nHi there."),
)
for expected_output, write_bytes in expected:
output = await proc.stdout.readuntil(separator=b":")
assert expected_output in output
proc.stdin.write(write_bytes + b"\n")
await proc.stdin.drain()
proc.stdin.write_eof()
await proc.stdin.drain()
return_code = await proc.wait()
assert return_code == 0
Fix deadlock in subprocess pipes
|
import asyncio
import sys
import pytest
pytestmark = pytest.mark.asyncio()
async def test_command_line_send(smtpd_server, hostname, port):
proc = await asyncio.create_subprocess_exec(
sys.executable,
b"-m",
b"aiosmtplib",
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
)
inputs = (
bytes(hostname, "ascii"),
bytes(str(port), "ascii"),
b"sender@example.com",
b"recipient@example.com",
b"Subject: Hello World\n\nHi there.",
)
messages = (
b"SMTP server hostname [localhost]:",
b"SMTP server port [25]:",
b"From:",
b"To:",
b"Enter message, end with ^D:",
)
output, errors = await proc.communicate(input=b"\n".join(inputs))
assert errors is None
for message in messages:
assert message in output
assert proc.returncode == 0
|
<commit_before>import asyncio
import sys
import pytest
pytestmark = pytest.mark.asyncio()
async def test_command_line_send(smtpd_server, hostname, port):
proc = await asyncio.create_subprocess_exec(
sys.executable,
b"-m",
b"aiosmtplib",
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
)
expected = (
(b"hostname", bytes(hostname, "ascii")),
(b"port", bytes(str(port), "ascii")),
(b"From", b"sender@example.com"),
(b"To", b"recipient@example.com"),
(b"message", b"Subject: Hello World\n\nHi there."),
)
for expected_output, write_bytes in expected:
output = await proc.stdout.readuntil(separator=b":")
assert expected_output in output
proc.stdin.write(write_bytes + b"\n")
await proc.stdin.drain()
proc.stdin.write_eof()
await proc.stdin.drain()
return_code = await proc.wait()
assert return_code == 0
<commit_msg>Fix deadlock in subprocess pipes<commit_after>
|
import asyncio
import sys
import pytest
pytestmark = pytest.mark.asyncio()
async def test_command_line_send(smtpd_server, hostname, port):
proc = await asyncio.create_subprocess_exec(
sys.executable,
b"-m",
b"aiosmtplib",
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
)
inputs = (
bytes(hostname, "ascii"),
bytes(str(port), "ascii"),
b"sender@example.com",
b"recipient@example.com",
b"Subject: Hello World\n\nHi there.",
)
messages = (
b"SMTP server hostname [localhost]:",
b"SMTP server port [25]:",
b"From:",
b"To:",
b"Enter message, end with ^D:",
)
output, errors = await proc.communicate(input=b"\n".join(inputs))
assert errors is None
for message in messages:
assert message in output
assert proc.returncode == 0
|
import asyncio
import sys
import pytest
pytestmark = pytest.mark.asyncio()
async def test_command_line_send(smtpd_server, hostname, port):
proc = await asyncio.create_subprocess_exec(
sys.executable,
b"-m",
b"aiosmtplib",
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
)
expected = (
(b"hostname", bytes(hostname, "ascii")),
(b"port", bytes(str(port), "ascii")),
(b"From", b"sender@example.com"),
(b"To", b"recipient@example.com"),
(b"message", b"Subject: Hello World\n\nHi there."),
)
for expected_output, write_bytes in expected:
output = await proc.stdout.readuntil(separator=b":")
assert expected_output in output
proc.stdin.write(write_bytes + b"\n")
await proc.stdin.drain()
proc.stdin.write_eof()
await proc.stdin.drain()
return_code = await proc.wait()
assert return_code == 0
Fix deadlock in subprocess pipesimport asyncio
import sys
import pytest
pytestmark = pytest.mark.asyncio()
async def test_command_line_send(smtpd_server, hostname, port):
proc = await asyncio.create_subprocess_exec(
sys.executable,
b"-m",
b"aiosmtplib",
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
)
inputs = (
bytes(hostname, "ascii"),
bytes(str(port), "ascii"),
b"sender@example.com",
b"recipient@example.com",
b"Subject: Hello World\n\nHi there.",
)
messages = (
b"SMTP server hostname [localhost]:",
b"SMTP server port [25]:",
b"From:",
b"To:",
b"Enter message, end with ^D:",
)
output, errors = await proc.communicate(input=b"\n".join(inputs))
assert errors is None
for message in messages:
assert message in output
assert proc.returncode == 0
|
<commit_before>import asyncio
import sys
import pytest
pytestmark = pytest.mark.asyncio()
async def test_command_line_send(smtpd_server, hostname, port):
proc = await asyncio.create_subprocess_exec(
sys.executable,
b"-m",
b"aiosmtplib",
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
)
expected = (
(b"hostname", bytes(hostname, "ascii")),
(b"port", bytes(str(port), "ascii")),
(b"From", b"sender@example.com"),
(b"To", b"recipient@example.com"),
(b"message", b"Subject: Hello World\n\nHi there."),
)
for expected_output, write_bytes in expected:
output = await proc.stdout.readuntil(separator=b":")
assert expected_output in output
proc.stdin.write(write_bytes + b"\n")
await proc.stdin.drain()
proc.stdin.write_eof()
await proc.stdin.drain()
return_code = await proc.wait()
assert return_code == 0
<commit_msg>Fix deadlock in subprocess pipes<commit_after>import asyncio
import sys
import pytest
pytestmark = pytest.mark.asyncio()
async def test_command_line_send(smtpd_server, hostname, port):
proc = await asyncio.create_subprocess_exec(
sys.executable,
b"-m",
b"aiosmtplib",
stdin=asyncio.subprocess.PIPE,
stdout=asyncio.subprocess.PIPE,
)
inputs = (
bytes(hostname, "ascii"),
bytes(str(port), "ascii"),
b"sender@example.com",
b"recipient@example.com",
b"Subject: Hello World\n\nHi there.",
)
messages = (
b"SMTP server hostname [localhost]:",
b"SMTP server port [25]:",
b"From:",
b"To:",
b"Enter message, end with ^D:",
)
output, errors = await proc.communicate(input=b"\n".join(inputs))
assert errors is None
for message in messages:
assert message in output
assert proc.returncode == 0
|
659321fafc7379f32f45f86eb4c366de884cce35
|
tests/test_ping.py
|
tests/test_ping.py
|
from rohrpost.handlers import handle_ping
def test_ping(message):
    """A bare ping yields a single open pong reply with no payload."""
    handle_ping(message, request={'id': 123})
    channel = message.reply_channel
    assert channel.closed is False
    assert len(channel.data) == 1
    response = channel.data[-1]
    assert response['id'] == 123
    assert response['type'] == 'pong'
    assert 'data' not in response
def test_ping_additional_data(message):
    """A ping carrying a dict payload gets that payload echoed in the pong."""
    request = {
        'id': 123,
        'type': 'ping',
        'data': {'some': 'data', 'other': 'data'},
    }
    handle_ping(message, request=request)
    channel = message.reply_channel
    assert channel.closed is False
    assert len(channel.data) == 1
    response = channel.data[-1]
    assert response['id'] == 123
    assert response['type'] == 'pong'
    assert response['data']['some'] == 'data'
def test_ping_additional_non_dict_data(message):
    """A non-dict ping payload comes back nested under a 'data' key."""
    handle_ping(message, request={'id': 123, 'type': 'ping', 'data': 1})
    channel = message.reply_channel
    assert channel.closed is False
    assert len(channel.data) == 1
    response = channel.data[-1]
    assert response['id'] == 123
    assert response['type'] == 'pong'
    assert response['data']['data'] == 1
|
from rohrpost.handlers import handle_ping
def test_ping(message):
handle_ping(message, request={'id': 123})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert 'data' not in data
def test_ping_additional_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': {'some': 'data', 'other': 'data', 'handler': 'foo'}
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['some'] == 'data'
assert data['data']['handler'] == 'foo'
def test_ping_additional_non_dict_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': 1
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['data'] == 1
|
Add a failing test that demonstrates wrong handling of data
|
[tests] Add a failing test that demonstrates wrong handling of data
The ping handler adds the data directly to the response_kwargs,
allowing internal kwargs to be overwritten.
`send_message()` and `build_message()` should not accept
`**additional_data`, but `additional_data: dict = None` instead.
|
Python
|
mit
|
axsemantics/rohrpost,axsemantics/rohrpost
|
from rohrpost.handlers import handle_ping
def test_ping(message):
handle_ping(message, request={'id': 123})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert 'data' not in data
def test_ping_additional_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': {'some': 'data', 'other': 'data'}
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['some'] == 'data'
def test_ping_additional_non_dict_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': 1
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['data'] == 1
[tests] Add a failing test that demonstrates wrong handling of data
The ping handler adds the data directly to the response_kwargs,
allowing internal kwargs to be overwritten.
`send_message()` and `build_message()` should not accept
`**additional_data`, but `additional_data: dict = None` instead.
|
from rohrpost.handlers import handle_ping
def test_ping(message):
handle_ping(message, request={'id': 123})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert 'data' not in data
def test_ping_additional_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': {'some': 'data', 'other': 'data', 'handler': 'foo'}
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['some'] == 'data'
assert data['data']['handler'] == 'foo'
def test_ping_additional_non_dict_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': 1
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['data'] == 1
|
<commit_before>from rohrpost.handlers import handle_ping
def test_ping(message):
handle_ping(message, request={'id': 123})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert 'data' not in data
def test_ping_additional_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': {'some': 'data', 'other': 'data'}
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['some'] == 'data'
def test_ping_additional_non_dict_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': 1
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['data'] == 1
<commit_msg>[tests] Add a failing test that demonstrates wrong handling of data
The ping handler adds the data directly to the response_kwargs,
allowing internal kwargs to be overwritten.
`send_message()` and `build_message()` should not accept
`**additional_data`, but `additional_data: dict = None` instead.<commit_after>
|
from rohrpost.handlers import handle_ping
def test_ping(message):
handle_ping(message, request={'id': 123})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert 'data' not in data
def test_ping_additional_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': {'some': 'data', 'other': 'data', 'handler': 'foo'}
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['some'] == 'data'
assert data['data']['handler'] == 'foo'
def test_ping_additional_non_dict_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': 1
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['data'] == 1
|
from rohrpost.handlers import handle_ping
def test_ping(message):
handle_ping(message, request={'id': 123})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert 'data' not in data
def test_ping_additional_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': {'some': 'data', 'other': 'data'}
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['some'] == 'data'
def test_ping_additional_non_dict_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': 1
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['data'] == 1
[tests] Add a failing test that demonstrates wrong handling of data
The ping handler adds the data directly to the response_kwargs,
allowing internal kwargs to be overwritten.
`send_message()` and `build_message()` should not accept
`**additional_data`, but `additional_data: dict = None` instead.from rohrpost.handlers import handle_ping
def test_ping(message):
handle_ping(message, request={'id': 123})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert 'data' not in data
def test_ping_additional_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': {'some': 'data', 'other': 'data', 'handler': 'foo'}
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['some'] == 'data'
assert data['data']['handler'] == 'foo'
def test_ping_additional_non_dict_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': 1
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['data'] == 1
|
<commit_before>from rohrpost.handlers import handle_ping
def test_ping(message):
handle_ping(message, request={'id': 123})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert 'data' not in data
def test_ping_additional_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': {'some': 'data', 'other': 'data'}
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['some'] == 'data'
def test_ping_additional_non_dict_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': 1
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['data'] == 1
<commit_msg>[tests] Add a failing test that demonstrates wrong handling of data
The ping handler adds the data directly to the response_kwargs,
allowing internal kwargs to be overwritten.
`send_message()` and `build_message()` should not accept
`**additional_data`, but `additional_data: dict = None` instead.<commit_after>from rohrpost.handlers import handle_ping
def test_ping(message):
handle_ping(message, request={'id': 123})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert 'data' not in data
def test_ping_additional_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': {'some': 'data', 'other': 'data', 'handler': 'foo'}
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['some'] == 'data'
assert data['data']['handler'] == 'foo'
def test_ping_additional_non_dict_data(message):
handle_ping(message, request={
'id': 123,
'type': 'ping',
'data': 1
})
assert message.reply_channel.closed is False
assert len(message.reply_channel.data) == 1
data = message.reply_channel.data[-1]
assert data['id'] == 123
assert data['type'] == 'pong'
assert data['data']['data'] == 1
|
8830e4e86a9b9e807017a55da5c4faab76e01b69
|
tests/test_util.py
|
tests/test_util.py
|
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
import pytest
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
def test_unknown(self):
with pytest.raises(RuntimeError):
time_convert("5u")
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
Cover unknown time format case
|
Cover unknown time format case
|
Python
|
mit
|
CodersOfTheNight/verata
|
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
Cover unknown time format case
|
import pytest
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
def test_unknown(self):
with pytest.raises(RuntimeError):
time_convert("5u")
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
<commit_before>from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
<commit_msg>Cover unknown time format case<commit_after>
|
import pytest
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
def test_unknown(self):
with pytest.raises(RuntimeError):
time_convert("5u")
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
Cover unknown time format caseimport pytest
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
def test_unknown(self):
with pytest.raises(RuntimeError):
time_convert("5u")
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
<commit_before>from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
<commit_msg>Cover unknown time format case<commit_after>import pytest
from grazer.util import time_convert, grouper
class TestTimeConvert(object):
def test_seconds(self):
assert time_convert("10s") == 10
def test_minutes(self):
assert time_convert("2m") == 120
def test_hours(self):
assert time_convert("3h") == 3 * 60 * 60
def test_unknown(self):
with pytest.raises(RuntimeError):
time_convert("5u")
class TestGrouper(object):
def test_simple_seq(self):
seq = range(0, 10)
result = list(grouper(2, seq))
assert len(result) == 5
def test_odd_seq(self):
seq = range(0, 10)
result = list(grouper(3, seq))
assert len(result) == 4
assert result[-1] == (9, None, None)
|
8dc7035d10f648489bbdfd3087a65f0355e1a72c
|
tests/test_mapping.py
|
tests/test_mapping.py
|
from unittest import TestCase
from prudent.mapping import Mapping
class MappingTest(TestCase):
def setUp(self):
self.mapping = Mapping([(1, 2), (2, 3), (3, 4)])
def test_iter(self):
keys = [1, 2, 3]
for _ in range(2):
assert list(self.mapping) == keys
def test_contains(self):
assert 1 in self.mapping
assert 1 in self.mapping
assert 3 in self.mapping
def test_getitem(self):
assert self.mapping[1] == 2
assert self.mapping[3] == 4
assert self.mapping[2] == 3
def test_len(self):
assert len(self.mapping) == 0
self.mapping[3]
assert len(self.mapping) == 3
|
from unittest import TestCase
from prudent.mapping import Mapping
class MappingTest(TestCase):
def setUp(self):
self.mapping = Mapping([(1, 2), (2, 3), (3, 4)])
def test_iter_preserves_keys(self):
keys = [1, 2, 3]
for _ in range(2):
assert list(self.mapping) == keys
def test_contains(self):
assert 1 in self.mapping
assert 1 in self.mapping
assert 3 in self.mapping
def test_getitem(self):
assert self.mapping[1] == 2
assert self.mapping[3] == 4
assert self.mapping[2] == 3
def test_len(self):
assert len(self.mapping) == 0
self.mapping[3]
assert len(self.mapping) == 3
|
Use a more descriptive test case name
|
Use a more descriptive test case name
|
Python
|
mit
|
eugene-eeo/prudent
|
from unittest import TestCase
from prudent.mapping import Mapping
class MappingTest(TestCase):
def setUp(self):
self.mapping = Mapping([(1, 2), (2, 3), (3, 4)])
def test_iter(self):
keys = [1, 2, 3]
for _ in range(2):
assert list(self.mapping) == keys
def test_contains(self):
assert 1 in self.mapping
assert 1 in self.mapping
assert 3 in self.mapping
def test_getitem(self):
assert self.mapping[1] == 2
assert self.mapping[3] == 4
assert self.mapping[2] == 3
def test_len(self):
assert len(self.mapping) == 0
self.mapping[3]
assert len(self.mapping) == 3
Use a more descriptive test case name
|
from unittest import TestCase
from prudent.mapping import Mapping
class MappingTest(TestCase):
def setUp(self):
self.mapping = Mapping([(1, 2), (2, 3), (3, 4)])
def test_iter_preserves_keys(self):
keys = [1, 2, 3]
for _ in range(2):
assert list(self.mapping) == keys
def test_contains(self):
assert 1 in self.mapping
assert 1 in self.mapping
assert 3 in self.mapping
def test_getitem(self):
assert self.mapping[1] == 2
assert self.mapping[3] == 4
assert self.mapping[2] == 3
def test_len(self):
assert len(self.mapping) == 0
self.mapping[3]
assert len(self.mapping) == 3
|
<commit_before>from unittest import TestCase
from prudent.mapping import Mapping
class MappingTest(TestCase):
def setUp(self):
self.mapping = Mapping([(1, 2), (2, 3), (3, 4)])
def test_iter(self):
keys = [1, 2, 3]
for _ in range(2):
assert list(self.mapping) == keys
def test_contains(self):
assert 1 in self.mapping
assert 1 in self.mapping
assert 3 in self.mapping
def test_getitem(self):
assert self.mapping[1] == 2
assert self.mapping[3] == 4
assert self.mapping[2] == 3
def test_len(self):
assert len(self.mapping) == 0
self.mapping[3]
assert len(self.mapping) == 3
<commit_msg>Use a more descriptive test case name<commit_after>
|
from unittest import TestCase
from prudent.mapping import Mapping
class MappingTest(TestCase):
def setUp(self):
self.mapping = Mapping([(1, 2), (2, 3), (3, 4)])
def test_iter_preserves_keys(self):
keys = [1, 2, 3]
for _ in range(2):
assert list(self.mapping) == keys
def test_contains(self):
assert 1 in self.mapping
assert 1 in self.mapping
assert 3 in self.mapping
def test_getitem(self):
assert self.mapping[1] == 2
assert self.mapping[3] == 4
assert self.mapping[2] == 3
def test_len(self):
assert len(self.mapping) == 0
self.mapping[3]
assert len(self.mapping) == 3
|
from unittest import TestCase
from prudent.mapping import Mapping
class MappingTest(TestCase):
def setUp(self):
self.mapping = Mapping([(1, 2), (2, 3), (3, 4)])
def test_iter(self):
keys = [1, 2, 3]
for _ in range(2):
assert list(self.mapping) == keys
def test_contains(self):
assert 1 in self.mapping
assert 1 in self.mapping
assert 3 in self.mapping
def test_getitem(self):
assert self.mapping[1] == 2
assert self.mapping[3] == 4
assert self.mapping[2] == 3
def test_len(self):
assert len(self.mapping) == 0
self.mapping[3]
assert len(self.mapping) == 3
Use a more descriptive test case namefrom unittest import TestCase
from prudent.mapping import Mapping
class MappingTest(TestCase):
def setUp(self):
self.mapping = Mapping([(1, 2), (2, 3), (3, 4)])
def test_iter_preserves_keys(self):
keys = [1, 2, 3]
for _ in range(2):
assert list(self.mapping) == keys
def test_contains(self):
assert 1 in self.mapping
assert 1 in self.mapping
assert 3 in self.mapping
def test_getitem(self):
assert self.mapping[1] == 2
assert self.mapping[3] == 4
assert self.mapping[2] == 3
def test_len(self):
assert len(self.mapping) == 0
self.mapping[3]
assert len(self.mapping) == 3
|
<commit_before>from unittest import TestCase
from prudent.mapping import Mapping
class MappingTest(TestCase):
def setUp(self):
self.mapping = Mapping([(1, 2), (2, 3), (3, 4)])
def test_iter(self):
keys = [1, 2, 3]
for _ in range(2):
assert list(self.mapping) == keys
def test_contains(self):
assert 1 in self.mapping
assert 1 in self.mapping
assert 3 in self.mapping
def test_getitem(self):
assert self.mapping[1] == 2
assert self.mapping[3] == 4
assert self.mapping[2] == 3
def test_len(self):
assert len(self.mapping) == 0
self.mapping[3]
assert len(self.mapping) == 3
<commit_msg>Use a more descriptive test case name<commit_after>from unittest import TestCase
from prudent.mapping import Mapping
class MappingTest(TestCase):
def setUp(self):
self.mapping = Mapping([(1, 2), (2, 3), (3, 4)])
def test_iter_preserves_keys(self):
keys = [1, 2, 3]
for _ in range(2):
assert list(self.mapping) == keys
def test_contains(self):
assert 1 in self.mapping
assert 1 in self.mapping
assert 3 in self.mapping
def test_getitem(self):
assert self.mapping[1] == 2
assert self.mapping[3] == 4
assert self.mapping[2] == 3
def test_len(self):
assert len(self.mapping) == 0
self.mapping[3]
assert len(self.mapping) == 3
|
6848eb77af8dc274f881e5895e541923f34e5354
|
elections/admin.py
|
elections/admin.py
|
from django.contrib import admin
from .models import (Election, VacantPosition, Candidature, Vote)
@admin.register(Election)
class ElectionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Candidature)
class CandidatureAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
admin.site.register(VacantPosition)
admin.site.register(Vote)
|
from django.contrib import admin
from .models import (Election, VacantPosition, Candidature, Vote)
class VacantPositionInline(admin.StackedInline):
model = VacantPosition
extra = 0
@admin.register(Election)
class ElectionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
inlines = [VacantPositionInline,]
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Candidature)
class CandidatureAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
|
Move VacantPosition to Election as an inline. Remove Vote.
|
Move VacantPosition to Election as an inline. Remove Vote.
|
Python
|
mit
|
QSchulz/sportassociation,QSchulz/sportassociation,QSchulz/sportassociation
|
from django.contrib import admin
from .models import (Election, VacantPosition, Candidature, Vote)
@admin.register(Election)
class ElectionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Candidature)
class CandidatureAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
admin.site.register(VacantPosition)
admin.site.register(Vote)
Move VacantPosition to Election as an inline. Remove Vote.
|
from django.contrib import admin
from .models import (Election, VacantPosition, Candidature, Vote)
class VacantPositionInline(admin.StackedInline):
model = VacantPosition
extra = 0
@admin.register(Election)
class ElectionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
inlines = [VacantPositionInline,]
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Candidature)
class CandidatureAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
|
<commit_before>from django.contrib import admin
from .models import (Election, VacantPosition, Candidature, Vote)
@admin.register(Election)
class ElectionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Candidature)
class CandidatureAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
admin.site.register(VacantPosition)
admin.site.register(Vote)
<commit_msg>Move VacantPosition to Election as an inline. Remove Vote.<commit_after>
|
from django.contrib import admin
from .models import (Election, VacantPosition, Candidature, Vote)
class VacantPositionInline(admin.StackedInline):
model = VacantPosition
extra = 0
@admin.register(Election)
class ElectionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
inlines = [VacantPositionInline,]
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Candidature)
class CandidatureAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
|
from django.contrib import admin
from .models import (Election, VacantPosition, Candidature, Vote)
@admin.register(Election)
class ElectionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Candidature)
class CandidatureAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
admin.site.register(VacantPosition)
admin.site.register(Vote)
Move VacantPosition to Election as an inline. Remove Vote.from django.contrib import admin
from .models import (Election, VacantPosition, Candidature, Vote)
class VacantPositionInline(admin.StackedInline):
model = VacantPosition
extra = 0
@admin.register(Election)
class ElectionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
inlines = [VacantPositionInline,]
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Candidature)
class CandidatureAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
|
<commit_before>from django.contrib import admin
from .models import (Election, VacantPosition, Candidature, Vote)
@admin.register(Election)
class ElectionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Candidature)
class CandidatureAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
admin.site.register(VacantPosition)
admin.site.register(Vote)
<commit_msg>Move VacantPosition to Election as an inline. Remove Vote.<commit_after>from django.contrib import admin
from .models import (Election, VacantPosition, Candidature, Vote)
class VacantPositionInline(admin.StackedInline):
model = VacantPosition
extra = 0
@admin.register(Election)
class ElectionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('title',)}
inlines = [VacantPositionInline,]
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Candidature)
class CandidatureAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
|
69b33f8f87b6dfc0fbaf96eca25c02535c9e09e7
|
src/test/almost_equal.py
|
src/test/almost_equal.py
|
def datetime_almost_equal(datetime1, datetime2, seconds=60):
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
|
from datetime import datetime
import pytz
def datetime_almost_equal(datetime1:datetime, datetime2:datetime, seconds:int=60):
if not(datetime1.tzinfo):
datetime1 = pytz.utc.localize(datetime1)
datetime1 = datetime1.astimezone(pytz.utc)
if not(datetime2.tzinfo):
datetime2 = pytz.utc.localize(datetime2)
datetime2 = datetime2.astimezone(pytz.utc)
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
|
Make sure all datetimes are UTC
|
Make sure all datetimes are UTC
|
Python
|
apache-2.0
|
sffjunkie/astral,sffjunkie/astral
|
def datetime_almost_equal(datetime1, datetime2, seconds=60):
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
Make sure all datetimes are UTC
|
from datetime import datetime
import pytz
def datetime_almost_equal(datetime1:datetime, datetime2:datetime, seconds:int=60):
if not(datetime1.tzinfo):
datetime1 = pytz.utc.localize(datetime1)
datetime1 = datetime1.astimezone(pytz.utc)
if not(datetime2.tzinfo):
datetime2 = pytz.utc.localize(datetime2)
datetime2 = datetime2.astimezone(pytz.utc)
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
|
<commit_before>def datetime_almost_equal(datetime1, datetime2, seconds=60):
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
<commit_msg>Make sure all datetimes are UTC<commit_after>
|
from datetime import datetime
import pytz
def datetime_almost_equal(datetime1:datetime, datetime2:datetime, seconds:int=60):
if not(datetime1.tzinfo):
datetime1 = pytz.utc.localize(datetime1)
datetime1 = datetime1.astimezone(pytz.utc)
if not(datetime2.tzinfo):
datetime2 = pytz.utc.localize(datetime2)
datetime2 = datetime2.astimezone(pytz.utc)
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
|
def datetime_almost_equal(datetime1, datetime2, seconds=60):
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
Make sure all datetimes are UTCfrom datetime import datetime
import pytz
def datetime_almost_equal(datetime1:datetime, datetime2:datetime, seconds:int=60):
if not(datetime1.tzinfo):
datetime1 = pytz.utc.localize(datetime1)
datetime1 = datetime1.astimezone(pytz.utc)
if not(datetime2.tzinfo):
datetime2 = pytz.utc.localize(datetime2)
datetime2 = datetime2.astimezone(pytz.utc)
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
|
<commit_before>def datetime_almost_equal(datetime1, datetime2, seconds=60):
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
<commit_msg>Make sure all datetimes are UTC<commit_after>from datetime import datetime
import pytz
def datetime_almost_equal(datetime1:datetime, datetime2:datetime, seconds:int=60):
if not(datetime1.tzinfo):
datetime1 = pytz.utc.localize(datetime1)
datetime1 = datetime1.astimezone(pytz.utc)
if not(datetime2.tzinfo):
datetime2 = pytz.utc.localize(datetime2)
datetime2 = datetime2.astimezone(pytz.utc)
dd = datetime1 - datetime2
sd = (dd.days * 24 * 60 * 60) + dd.seconds
return abs(sd) <= seconds
|
0f593824cae8d0ae3a888194184362493222628e
|
jsonsempai.py
|
jsonsempai.py
|
import imp
import json
import os
import sys
class Dot(dict):
def __init__(self, d):
super(dict, self).__init__()
for k, v in iter(d.items()):
if isinstance(v, dict):
self[k] = Dot(v)
else:
self[k] = v
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError("'{}'".format(attr))
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
self.json_path = os.path.join(d, '{}.json'.format(name))
if os.path.isfile(self.json_path):
return self
return None
def load_module(self, name):
mod = imp.new_module(name)
mod.__file__ = self.json_path
mod.__loader__ = self
try:
with open(self.json_path) as f:
d = json.load(f)
except ValueError:
raise ImportError(
'"{}" does not contain valid json.'.format(self.json_path))
except:
raise ImportError(
'Could not open "{}".'.format(self.json_path))
mod.__dict__.update(d)
for k, i in mod.__dict__.items():
if isinstance(i, dict):
mod.__dict__[k] = Dot(i)
return mod
sys.meta_path.append(SempaiLoader())
|
import imp
import json
import os
import sys
class Dot(dict):
def __init__(self, d):
super(dict, self).__init__()
for k, v in iter(d.items()):
if isinstance(v, dict):
self[k] = Dot(v)
else:
self[k] = v
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError("'{}'".format(attr))
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
self.json_path = os.path.join(d, '{name}.json'.format(name=name))
if os.path.isfile(self.json_path):
return self
return None
def load_module(self, name):
mod = imp.new_module(name)
mod.__file__ = self.json_path
mod.__loader__ = self
try:
with open(self.json_path) as f:
d = json.load(f)
except ValueError:
raise ImportError(
'"{}" does not contain valid json.'.format(self.json_path))
except:
raise ImportError(
'Could not open "{}".'.format(self.json_path))
mod.__dict__.update(d)
for k, i in mod.__dict__.items():
if isinstance(i, dict):
mod.__dict__[k] = Dot(i)
return mod
sys.meta_path.append(SempaiLoader())
|
Fix python 2.6 default string formatting
|
Fix python 2.6 default string formatting
|
Python
|
mit
|
kragniz/json-sempai
|
import imp
import json
import os
import sys
class Dot(dict):
def __init__(self, d):
super(dict, self).__init__()
for k, v in iter(d.items()):
if isinstance(v, dict):
self[k] = Dot(v)
else:
self[k] = v
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError("'{}'".format(attr))
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
self.json_path = os.path.join(d, '{}.json'.format(name))
if os.path.isfile(self.json_path):
return self
return None
def load_module(self, name):
mod = imp.new_module(name)
mod.__file__ = self.json_path
mod.__loader__ = self
try:
with open(self.json_path) as f:
d = json.load(f)
except ValueError:
raise ImportError(
'"{}" does not contain valid json.'.format(self.json_path))
except:
raise ImportError(
'Could not open "{}".'.format(self.json_path))
mod.__dict__.update(d)
for k, i in mod.__dict__.items():
if isinstance(i, dict):
mod.__dict__[k] = Dot(i)
return mod
sys.meta_path.append(SempaiLoader())
Fix python 2.6 default string formatting
|
import imp
import json
import os
import sys
class Dot(dict):
    """A dict whose keys are also readable/writable as attributes.

    Nested plain dicts are wrapped recursively so ``d.a.b`` works.
    """
    def __init__(self, d):
        # BUG FIX: the original called super(dict, self).__init__(), which
        # starts the MRO lookup *after* dict and therefore invokes
        # object.__init__ rather than dict's initializer.  Passing the
        # defining class (Dot) is the correct idiom and keeps subclasses safe.
        super(Dot, self).__init__()
        for k, v in iter(d.items()):
            if isinstance(v, dict):
                # Wrap nested dicts so attribute access composes.
                self[k] = Dot(v)
            else:
                self[k] = v
    def __getattr__(self, attr):
        # Fall back to item lookup, translating the failure into the
        # AttributeError that attribute access is expected to raise.
        try:
            return self[attr]
        except KeyError:
            raise AttributeError("'{}'".format(attr))
    # Attribute writes/deletes map directly onto item writes/deletes.
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
class SempaiLoader(object):
    """PEP 302-style import hook that loads ``<name>.json`` files found on
    sys.path as Python modules.

    NOTE(review): relies on the legacy finder/loader protocol
    (``find_module``/``load_module``) and the ``imp`` module, both of which
    are deprecated and removed in modern Python 3 -- confirm the supported
    interpreter range before modernizing.
    """
    def find_module(self, name, path=None):
        # Scan sys.path for a matching "<name>.json"; remember the hit on
        # self so load_module can open the same file.
        for d in sys.path:
            self.json_path = os.path.join(d, '{name}.json'.format(name=name))
            if os.path.isfile(self.json_path):
                return self
        return None
    def load_module(self, name):
        # Build a fresh, empty module object and populate it from the JSON.
        mod = imp.new_module(name)
        mod.__file__ = self.json_path
        mod.__loader__ = self
        try:
            with open(self.json_path) as f:
                d = json.load(f)
        except ValueError:
            # json.load signals malformed input with ValueError.
            raise ImportError(
                '"{}" does not contain valid json.'.format(self.json_path))
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt and
            # SystemExit -- consider narrowing to (IOError, OSError).
            raise ImportError(
                'Could not open "{}".'.format(self.json_path))
        # Top-level JSON keys become module attributes; nested dicts are
        # wrapped in Dot for attribute-style access.
        mod.__dict__.update(d)
        for k, i in mod.__dict__.items():
            if isinstance(i, dict):
                mod.__dict__[k] = Dot(i)
        return mod
sys.meta_path.append(SempaiLoader())
|
<commit_before>import imp
import json
import os
import sys
class Dot(dict):
def __init__(self, d):
super(dict, self).__init__()
for k, v in iter(d.items()):
if isinstance(v, dict):
self[k] = Dot(v)
else:
self[k] = v
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError("'{}'".format(attr))
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
self.json_path = os.path.join(d, '{}.json'.format(name))
if os.path.isfile(self.json_path):
return self
return None
def load_module(self, name):
mod = imp.new_module(name)
mod.__file__ = self.json_path
mod.__loader__ = self
try:
with open(self.json_path) as f:
d = json.load(f)
except ValueError:
raise ImportError(
'"{}" does not contain valid json.'.format(self.json_path))
except:
raise ImportError(
'Could not open "{}".'.format(self.json_path))
mod.__dict__.update(d)
for k, i in mod.__dict__.items():
if isinstance(i, dict):
mod.__dict__[k] = Dot(i)
return mod
sys.meta_path.append(SempaiLoader())
<commit_msg>Fix python 2.6 default string formatting<commit_after>
|
import imp
import json
import os
import sys
class Dot(dict):
def __init__(self, d):
super(dict, self).__init__()
for k, v in iter(d.items()):
if isinstance(v, dict):
self[k] = Dot(v)
else:
self[k] = v
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError("'{}'".format(attr))
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
self.json_path = os.path.join(d, '{name}.json'.format(name=name))
if os.path.isfile(self.json_path):
return self
return None
def load_module(self, name):
mod = imp.new_module(name)
mod.__file__ = self.json_path
mod.__loader__ = self
try:
with open(self.json_path) as f:
d = json.load(f)
except ValueError:
raise ImportError(
'"{}" does not contain valid json.'.format(self.json_path))
except:
raise ImportError(
'Could not open "{}".'.format(self.json_path))
mod.__dict__.update(d)
for k, i in mod.__dict__.items():
if isinstance(i, dict):
mod.__dict__[k] = Dot(i)
return mod
sys.meta_path.append(SempaiLoader())
|
import imp
import json
import os
import sys
class Dot(dict):
def __init__(self, d):
super(dict, self).__init__()
for k, v in iter(d.items()):
if isinstance(v, dict):
self[k] = Dot(v)
else:
self[k] = v
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError("'{}'".format(attr))
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
self.json_path = os.path.join(d, '{}.json'.format(name))
if os.path.isfile(self.json_path):
return self
return None
def load_module(self, name):
mod = imp.new_module(name)
mod.__file__ = self.json_path
mod.__loader__ = self
try:
with open(self.json_path) as f:
d = json.load(f)
except ValueError:
raise ImportError(
'"{}" does not contain valid json.'.format(self.json_path))
except:
raise ImportError(
'Could not open "{}".'.format(self.json_path))
mod.__dict__.update(d)
for k, i in mod.__dict__.items():
if isinstance(i, dict):
mod.__dict__[k] = Dot(i)
return mod
sys.meta_path.append(SempaiLoader())
Fix python 2.6 default string formattingimport imp
import json
import os
import sys
class Dot(dict):
def __init__(self, d):
super(dict, self).__init__()
for k, v in iter(d.items()):
if isinstance(v, dict):
self[k] = Dot(v)
else:
self[k] = v
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError("'{}'".format(attr))
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
self.json_path = os.path.join(d, '{name}.json'.format(name=name))
if os.path.isfile(self.json_path):
return self
return None
def load_module(self, name):
mod = imp.new_module(name)
mod.__file__ = self.json_path
mod.__loader__ = self
try:
with open(self.json_path) as f:
d = json.load(f)
except ValueError:
raise ImportError(
'"{}" does not contain valid json.'.format(self.json_path))
except:
raise ImportError(
'Could not open "{}".'.format(self.json_path))
mod.__dict__.update(d)
for k, i in mod.__dict__.items():
if isinstance(i, dict):
mod.__dict__[k] = Dot(i)
return mod
sys.meta_path.append(SempaiLoader())
|
<commit_before>import imp
import json
import os
import sys
class Dot(dict):
def __init__(self, d):
super(dict, self).__init__()
for k, v in iter(d.items()):
if isinstance(v, dict):
self[k] = Dot(v)
else:
self[k] = v
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError("'{}'".format(attr))
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
self.json_path = os.path.join(d, '{}.json'.format(name))
if os.path.isfile(self.json_path):
return self
return None
def load_module(self, name):
mod = imp.new_module(name)
mod.__file__ = self.json_path
mod.__loader__ = self
try:
with open(self.json_path) as f:
d = json.load(f)
except ValueError:
raise ImportError(
'"{}" does not contain valid json.'.format(self.json_path))
except:
raise ImportError(
'Could not open "{}".'.format(self.json_path))
mod.__dict__.update(d)
for k, i in mod.__dict__.items():
if isinstance(i, dict):
mod.__dict__[k] = Dot(i)
return mod
sys.meta_path.append(SempaiLoader())
<commit_msg>Fix python 2.6 default string formatting<commit_after>import imp
import json
import os
import sys
class Dot(dict):
def __init__(self, d):
super(dict, self).__init__()
for k, v in iter(d.items()):
if isinstance(v, dict):
self[k] = Dot(v)
else:
self[k] = v
def __getattr__(self, attr):
try:
return self[attr]
except KeyError:
raise AttributeError("'{}'".format(attr))
__setattr__ = dict.__setitem__
__delattr__ = dict.__delitem__
class SempaiLoader(object):
def find_module(self, name, path=None):
for d in sys.path:
self.json_path = os.path.join(d, '{name}.json'.format(name=name))
if os.path.isfile(self.json_path):
return self
return None
def load_module(self, name):
mod = imp.new_module(name)
mod.__file__ = self.json_path
mod.__loader__ = self
try:
with open(self.json_path) as f:
d = json.load(f)
except ValueError:
raise ImportError(
'"{}" does not contain valid json.'.format(self.json_path))
except:
raise ImportError(
'Could not open "{}".'.format(self.json_path))
mod.__dict__.update(d)
for k, i in mod.__dict__.items():
if isinstance(i, dict):
mod.__dict__[k] = Dot(i)
return mod
sys.meta_path.append(SempaiLoader())
|
0c63a45755eb492192207dcf0561231183634869
|
internal/util.py
|
internal/util.py
|
__author__ = 'Gareth Coles'
import logging
import sys
logging.basicConfig(
format="%(asctime)s | %(levelname)8s | %(message)s",
datefmt="%d %b %Y - %H:%M:%S",
level=(logging.DEBUG if "--debug" in sys.argv else logging.INFO))
def log(message, level=logging.INFO):
logging.log(level, message)
def log_request(request, message, level=logging.INFO):
ip = request.remote_addr
log("[{ip}] {message}".format(ip=ip, message=message), level)
|
__author__ = 'Gareth Coles'
import logging
import sys
logging.basicConfig(
format="%(asctime)s | %(levelname)8s | %(message)s",
datefmt="%d %b %Y - %H:%M:%S")
logger = logging.getLogger("Web")
logger.setLevel(logging.DEBUG if "--debug" in sys.argv else logging.INFO)
def log(message, level=logging.INFO):
logger.log(level, message)
def log_request(request, message, level=logging.INFO):
ip = request.remote_addr
log("[{ip}] {message}".format(ip=ip, message=message), level)
|
Cut down on logging spam from dict2xml :U
|
Cut down on logging spam from dict2xml :U
|
Python
|
artistic-2.0
|
Statik-Metrics/Statik-API
|
__author__ = 'Gareth Coles'
import logging
import sys
logging.basicConfig(
format="%(asctime)s | %(levelname)8s | %(message)s",
datefmt="%d %b %Y - %H:%M:%S",
level=(logging.DEBUG if "--debug" in sys.argv else logging.INFO))
def log(message, level=logging.INFO):
logging.log(level, message)
def log_request(request, message, level=logging.INFO):
ip = request.remote_addr
log("[{ip}] {message}".format(ip=ip, message=message), level)
Cut down on logging spam from dict2xml :U
|
__author__ = 'Gareth Coles'
import logging
import sys
logging.basicConfig(
format="%(asctime)s | %(levelname)8s | %(message)s",
datefmt="%d %b %Y - %H:%M:%S")
logger = logging.getLogger("Web")
logger.setLevel(logging.DEBUG if "--debug" in sys.argv else logging.INFO)
def log(message, level=logging.INFO):
logger.log(level, message)
def log_request(request, message, level=logging.INFO):
ip = request.remote_addr
log("[{ip}] {message}".format(ip=ip, message=message), level)
|
<commit_before>__author__ = 'Gareth Coles'
import logging
import sys
logging.basicConfig(
format="%(asctime)s | %(levelname)8s | %(message)s",
datefmt="%d %b %Y - %H:%M:%S",
level=(logging.DEBUG if "--debug" in sys.argv else logging.INFO))
def log(message, level=logging.INFO):
logging.log(level, message)
def log_request(request, message, level=logging.INFO):
ip = request.remote_addr
log("[{ip}] {message}".format(ip=ip, message=message), level)
<commit_msg>Cut down on logging spam from dict2xml :U<commit_after>
|
# Logging helpers for the web API.
__author__ = 'Gareth Coles'

import logging
import sys

logging.basicConfig(
    format="%(asctime)s | %(levelname)8s | %(message)s",
    datefmt="%d %b %Y - %H:%M:%S")

# A dedicated "Web" logger so third-party libraries don't inherit our level.
logger = logging.getLogger("Web")
if "--debug" in sys.argv:
    logger.setLevel(logging.DEBUG)
else:
    logger.setLevel(logging.INFO)


def log(message, level=logging.INFO):
    """Emit *message* on the module logger at *level*."""
    logger.log(level, message)


def log_request(request, message, level=logging.INFO):
    """Log *message* prefixed with the requesting client's IP address."""
    addr = request.remote_addr
    log("[{ip}] {message}".format(ip=addr, message=message), level)
|
__author__ = 'Gareth Coles'
import logging
import sys
logging.basicConfig(
format="%(asctime)s | %(levelname)8s | %(message)s",
datefmt="%d %b %Y - %H:%M:%S",
level=(logging.DEBUG if "--debug" in sys.argv else logging.INFO))
def log(message, level=logging.INFO):
logging.log(level, message)
def log_request(request, message, level=logging.INFO):
ip = request.remote_addr
log("[{ip}] {message}".format(ip=ip, message=message), level)
Cut down on logging spam from dict2xml :U__author__ = 'Gareth Coles'
import logging
import sys
logging.basicConfig(
format="%(asctime)s | %(levelname)8s | %(message)s",
datefmt="%d %b %Y - %H:%M:%S")
logger = logging.getLogger("Web")
logger.setLevel(logging.DEBUG if "--debug" in sys.argv else logging.INFO)
def log(message, level=logging.INFO):
logger.log(level, message)
def log_request(request, message, level=logging.INFO):
ip = request.remote_addr
log("[{ip}] {message}".format(ip=ip, message=message), level)
|
<commit_before>__author__ = 'Gareth Coles'
import logging
import sys
logging.basicConfig(
format="%(asctime)s | %(levelname)8s | %(message)s",
datefmt="%d %b %Y - %H:%M:%S",
level=(logging.DEBUG if "--debug" in sys.argv else logging.INFO))
def log(message, level=logging.INFO):
logging.log(level, message)
def log_request(request, message, level=logging.INFO):
ip = request.remote_addr
log("[{ip}] {message}".format(ip=ip, message=message), level)
<commit_msg>Cut down on logging spam from dict2xml :U<commit_after>__author__ = 'Gareth Coles'
import logging
import sys
logging.basicConfig(
format="%(asctime)s | %(levelname)8s | %(message)s",
datefmt="%d %b %Y - %H:%M:%S")
logger = logging.getLogger("Web")
logger.setLevel(logging.DEBUG if "--debug" in sys.argv else logging.INFO)
def log(message, level=logging.INFO):
logger.log(level, message)
def log_request(request, message, level=logging.INFO):
ip = request.remote_addr
log("[{ip}] {message}".format(ip=ip, message=message), level)
|
088f3dfe9bee2b7004bfdf51a6ce224783245c4e
|
chess/simplemove/movediff.py
|
chess/simplemove/movediff.py
|
import re,sys
def main(argv):
for line in sys.stdin:
piece,from_location,to_location = line.split("\t")
from_location = from_location.split(",")
to_location = to_location.split(",")
print "%s\t%s\t%s" % (piece, int(to_location[0])-int(from_location[0]), int(to_location[1]) - int(from_location[1]) )
if __name__ == '__main__':
sys.stderr.write("Calculate move diff: %s\n"%sys.argv)
main(sys.argv)
|
import re,sys
def main(argv):
for line in sys.stdin:
piece,from_location,to_location = line.split("\t")
from_location = from_location.split(",")
to_location = to_location.split(",")
print "%s\t%s\t%s" % (piece, int(to_location[1])-int(from_location[1]),
int(to_location[0]) - int(from_location[0]) )
if __name__ == '__main__':
sys.stderr.write("Calculate move diff: %s\n"%sys.argv)
main(sys.argv)
|
Change order of diff to be x,y instead of y,x
|
Change order of diff to be x,y instead of y,x
|
Python
|
apache-2.0
|
nivm/learningchess,nivm/learningchess
|
import re,sys
def main(argv):
for line in sys.stdin:
piece,from_location,to_location = line.split("\t")
from_location = from_location.split(",")
to_location = to_location.split(",")
print "%s\t%s\t%s" % (piece, int(to_location[0])-int(from_location[0]), int(to_location[1]) - int(from_location[1]) )
if __name__ == '__main__':
sys.stderr.write("Calculate move diff: %s\n"%sys.argv)
main(sys.argv)Change order of diff to be x,y instead of y,x
|
import re,sys
def main(argv):
for line in sys.stdin:
piece,from_location,to_location = line.split("\t")
from_location = from_location.split(",")
to_location = to_location.split(",")
print "%s\t%s\t%s" % (piece, int(to_location[1])-int(from_location[1]),
int(to_location[0]) - int(from_location[0]) )
if __name__ == '__main__':
sys.stderr.write("Calculate move diff: %s\n"%sys.argv)
main(sys.argv)
|
<commit_before>import re,sys
def main(argv):
for line in sys.stdin:
piece,from_location,to_location = line.split("\t")
from_location = from_location.split(",")
to_location = to_location.split(",")
print "%s\t%s\t%s" % (piece, int(to_location[0])-int(from_location[0]), int(to_location[1]) - int(from_location[1]) )
if __name__ == '__main__':
sys.stderr.write("Calculate move diff: %s\n"%sys.argv)
main(sys.argv)<commit_msg>Change order of diff to be x,y instead of y,x<commit_after>
|
# Python 2 stream filter: reads tab-separated chess moves from stdin and
# writes the piece plus its positional displacement to stdout.
import re,sys
def main(argv):
    # Each input line: "<piece>\t<from_a,from_b>\t<to_a,to_b>".
    # NOTE(review): the commit history says the output order was swapped to
    # be x,y -- index 1 is treated as x and index 0 as y; confirm against
    # the producer of this stream.
    for line in sys.stdin:
        piece,from_location,to_location = line.split("\t")
        from_location = from_location.split(",")
        to_location = to_location.split(",")
        # Emit "<piece>\t<dx>\t<dy>" using the swapped coordinate order.
        print "%s\t%s\t%s" % (piece, int(to_location[1])-int(from_location[1]),
            int(to_location[0]) - int(from_location[0]) )
if __name__ == '__main__':
    # Log the invocation to stderr so it never pollutes the data stream.
    sys.stderr.write("Calculate move diff: %s\n"%sys.argv)
    main(sys.argv)
|
import re,sys
def main(argv):
for line in sys.stdin:
piece,from_location,to_location = line.split("\t")
from_location = from_location.split(",")
to_location = to_location.split(",")
print "%s\t%s\t%s" % (piece, int(to_location[0])-int(from_location[0]), int(to_location[1]) - int(from_location[1]) )
if __name__ == '__main__':
sys.stderr.write("Calculate move diff: %s\n"%sys.argv)
main(sys.argv)Change order of diff to be x,y instead of y,ximport re,sys
def main(argv):
for line in sys.stdin:
piece,from_location,to_location = line.split("\t")
from_location = from_location.split(",")
to_location = to_location.split(",")
print "%s\t%s\t%s" % (piece, int(to_location[1])-int(from_location[1]),
int(to_location[0]) - int(from_location[0]) )
if __name__ == '__main__':
sys.stderr.write("Calculate move diff: %s\n"%sys.argv)
main(sys.argv)
|
<commit_before>import re,sys
def main(argv):
for line in sys.stdin:
piece,from_location,to_location = line.split("\t")
from_location = from_location.split(",")
to_location = to_location.split(",")
print "%s\t%s\t%s" % (piece, int(to_location[0])-int(from_location[0]), int(to_location[1]) - int(from_location[1]) )
if __name__ == '__main__':
sys.stderr.write("Calculate move diff: %s\n"%sys.argv)
main(sys.argv)<commit_msg>Change order of diff to be x,y instead of y,x<commit_after>import re,sys
def main(argv):
for line in sys.stdin:
piece,from_location,to_location = line.split("\t")
from_location = from_location.split(",")
to_location = to_location.split(",")
print "%s\t%s\t%s" % (piece, int(to_location[1])-int(from_location[1]),
int(to_location[0]) - int(from_location[0]) )
if __name__ == '__main__':
sys.stderr.write("Calculate move diff: %s\n"%sys.argv)
main(sys.argv)
|
f4e5f0587c1214433de46fc5d86e77d849fdddc4
|
src/robot/utils/robotio.py
|
src/robot/utils/robotio.py
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# TODO: Consider removing this and using u'' or `from __future__ import
# unicode_literals` everywhere.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# These streams require written text to be Unicode. We don't want to add
# `u` prefix to all our strings in Python 2, and cannot really use
# `unicode_literals` either because many other Python 2 APIs accept only
# byte strings.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
|
Replace TODO with comment explaining why it wasn't possible
|
Replace TODO with comment explaining why it wasn't possible
|
Python
|
apache-2.0
|
alexandrul-ci/robotframework,synsun/robotframework,jaloren/robotframework,snyderr/robotframework,joongh/robotframework,HelioGuilherme66/robotframework,HelioGuilherme66/robotframework,alexandrul-ci/robotframework,synsun/robotframework,synsun/robotframework,snyderr/robotframework,joongh/robotframework,synsun/robotframework,alexandrul-ci/robotframework,snyderr/robotframework,alexandrul-ci/robotframework,jaloren/robotframework,synsun/robotframework,jaloren/robotframework,joongh/robotframework,HelioGuilherme66/robotframework,robotframework/robotframework,snyderr/robotframework,robotframework/robotframework,joongh/robotframework,jaloren/robotframework,joongh/robotframework,snyderr/robotframework,robotframework/robotframework,jaloren/robotframework,alexandrul-ci/robotframework
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# TODO: Consider removing this and using u'' or `from __future__ import
# unicode_literals` everywhere.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
Replace TODO with comment explaining why it wasn't possible
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# These streams require written text to be Unicode. We don't want to add
# `u` prefix to all our strings in Python 2, and cannot really use
# `unicode_literals` either because many other Python 2 APIs accept only
# byte strings.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
|
<commit_before># Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# TODO: Consider removing this and using u'' or `from __future__ import
# unicode_literals` everywhere.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
<commit_msg>Replace TODO with comment explaining why it wasn't possible<commit_after>
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
    """Return a text writer.

    With *path*, a real file opened for writing with the given *encoding*
    and *newline*; without, an in-memory StringIO.  On Python 2 the
    returned object's ``write`` coerces its argument to unicode first.
    """
    if path:
        f = io.open(path, 'w', encoding=encoding, newline=newline)
    else:
        f = io.StringIO(newline=newline)
    if PY3:
        return f
    # These streams require written text to be Unicode. We don't want to add
    # `u` prefix to all our strings in Python 2, and cannot really use
    # `unicode_literals` either because many other Python 2 APIs accept only
    # byte strings.
    write = f.write                    # keep a reference to the raw writer
    f.write = lambda text: write(unicode(text))  # coerce to unicode (Py2 only)
    return f
def binary_file_writer(path=None):
    """Return a binary writer.

    With *path*, a real file opened in ``'wb'`` mode; without, an in-memory
    BytesIO whose ``getvalue(encoding)`` returns the decoded text rather
    than raw bytes.
    """
    if path:
        return io.open(path, 'wb')
    mem = io.BytesIO()
    raw_getvalue = mem.getvalue
    def _decoded_value(encoding='UTF-8'):
        # Decode the accumulated bytes on demand.
        return raw_getvalue().decode(encoding)
    mem.getvalue = _decoded_value
    return mem
|
# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# TODO: Consider removing this and using u'' or `from __future__ import
# unicode_literals` everywhere.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
Replace TODO with comment explaining why it wasn't possible# Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# These streams require written text to be Unicode. We don't want to add
# `u` prefix to all our strings in Python 2, and cannot really use
# `unicode_literals` either because many other Python 2 APIs accept only
# byte strings.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
|
<commit_before># Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# TODO: Consider removing this and using u'' or `from __future__ import
# unicode_literals` everywhere.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
<commit_msg>Replace TODO with comment explaining why it wasn't possible<commit_after># Copyright 2008-2015 Nokia Solutions and Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
from .platform import PY3
def file_writer(path=None, encoding='UTF-8', newline=None):
if path:
f = io.open(path, 'w', encoding=encoding, newline=newline)
else:
f = io.StringIO(newline=newline)
if PY3:
return f
# These streams require written text to be Unicode. We don't want to add
# `u` prefix to all our strings in Python 2, and cannot really use
# `unicode_literals` either because many other Python 2 APIs accept only
# byte strings.
write = f.write
f.write = lambda text: write(unicode(text))
return f
def binary_file_writer(path=None):
if path:
return io.open(path, 'wb')
f = io.BytesIO()
getvalue = f.getvalue
f.getvalue = lambda encoding='UTF-8': getvalue().decode(encoding)
return f
|
ffdf13c8217f3a785fe8768697b3e3da4b6ff9cb
|
cherrypy/py3util.py
|
cherrypy/py3util.py
|
"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
try:
sorted = sorted
except NameError:
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
try:
reversed = reversed
except NameError:
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
Use builtin sorted, reversed if available.
|
Use builtin sorted, reversed if available.
|
Python
|
bsd-3-clause
|
cherrypy/cheroot,Safihre/cherrypy,cherrypy/cherrypy,Safihre/cherrypy,cherrypy/cherrypy
|
"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
Use builtin sorted, reversed if available.
|
"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
try:
sorted = sorted
except NameError:
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
try:
reversed = reversed
except NameError:
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
<commit_before>"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
<commit_msg>Use builtin sorted, reversed if available.<commit_after>
|
"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
try:
sorted = sorted
except NameError:
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
try:
reversed = reversed
except NameError:
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
Use builtin sorted, reversed if available."""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
try:
sorted = sorted
except NameError:
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
try:
reversed = reversed
except NameError:
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
<commit_before>"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
<commit_msg>Use builtin sorted, reversed if available.<commit_after>"""
A simple module that helps unify the code between a python2 and python3 library.
"""
import sys
try:
sorted = sorted
except NameError:
def sorted(lst):
newlst = list(lst)
newlst.sort()
return newlst
try:
reversed = reversed
except NameError:
def reversed(lst):
newlst = list(lst)
return iter(newlst[::-1])
|
a978d79bf1a7c9ec9b38841c3e809bd2dc52f3c0
|
corehq/apps/commtrack/admin.py
|
corehq/apps/commtrack/admin.py
|
from django.contrib import admin
from .models import StockState
class StockStateAdmin(admin.ModelAdmin):
model = StockState
list_display = [
'section_id',
'case_id',
'product_id',
'stock_on_hand',
'daily_consumption',
'last_modified_date'
]
list_filter = [
'section_id',
'case_id',
'product_id',
'last_modified_date'
]
admin.site.register(StockState, StockStateAdmin)
|
from django.contrib import admin
from .models import StockState
class StockStateAdmin(admin.ModelAdmin):
model = StockState
list_display = [
'section_id',
'case_id',
'product_id',
'stock_on_hand',
'daily_consumption',
'last_modified_date'
]
list_filter = [
'section_id',
'last_modified_date'
]
search_fields = ["case_id", "product_id"]
admin.site.register(StockState, StockStateAdmin)
|
Use product/case id's in search rather than filter
|
Use product/case id's in search rather than filter
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq
|
from django.contrib import admin
from .models import StockState
class StockStateAdmin(admin.ModelAdmin):
model = StockState
list_display = [
'section_id',
'case_id',
'product_id',
'stock_on_hand',
'daily_consumption',
'last_modified_date'
]
list_filter = [
'section_id',
'case_id',
'product_id',
'last_modified_date'
]
admin.site.register(StockState, StockStateAdmin)
Use product/case id's in search rather than filter
|
from django.contrib import admin
from .models import StockState
class StockStateAdmin(admin.ModelAdmin):
model = StockState
list_display = [
'section_id',
'case_id',
'product_id',
'stock_on_hand',
'daily_consumption',
'last_modified_date'
]
list_filter = [
'section_id',
'last_modified_date'
]
search_fields = ["case_id", "product_id"]
admin.site.register(StockState, StockStateAdmin)
|
<commit_before>from django.contrib import admin
from .models import StockState
class StockStateAdmin(admin.ModelAdmin):
model = StockState
list_display = [
'section_id',
'case_id',
'product_id',
'stock_on_hand',
'daily_consumption',
'last_modified_date'
]
list_filter = [
'section_id',
'case_id',
'product_id',
'last_modified_date'
]
admin.site.register(StockState, StockStateAdmin)
<commit_msg>Use product/case id's in search rather than filter<commit_after>
|
from django.contrib import admin
from .models import StockState
class StockStateAdmin(admin.ModelAdmin):
model = StockState
list_display = [
'section_id',
'case_id',
'product_id',
'stock_on_hand',
'daily_consumption',
'last_modified_date'
]
list_filter = [
'section_id',
'last_modified_date'
]
search_fields = ["case_id", "product_id"]
admin.site.register(StockState, StockStateAdmin)
|
from django.contrib import admin
from .models import StockState
class StockStateAdmin(admin.ModelAdmin):
model = StockState
list_display = [
'section_id',
'case_id',
'product_id',
'stock_on_hand',
'daily_consumption',
'last_modified_date'
]
list_filter = [
'section_id',
'case_id',
'product_id',
'last_modified_date'
]
admin.site.register(StockState, StockStateAdmin)
Use product/case id's in search rather than filterfrom django.contrib import admin
from .models import StockState
class StockStateAdmin(admin.ModelAdmin):
model = StockState
list_display = [
'section_id',
'case_id',
'product_id',
'stock_on_hand',
'daily_consumption',
'last_modified_date'
]
list_filter = [
'section_id',
'last_modified_date'
]
search_fields = ["case_id", "product_id"]
admin.site.register(StockState, StockStateAdmin)
|
<commit_before>from django.contrib import admin
from .models import StockState
class StockStateAdmin(admin.ModelAdmin):
model = StockState
list_display = [
'section_id',
'case_id',
'product_id',
'stock_on_hand',
'daily_consumption',
'last_modified_date'
]
list_filter = [
'section_id',
'case_id',
'product_id',
'last_modified_date'
]
admin.site.register(StockState, StockStateAdmin)
<commit_msg>Use product/case id's in search rather than filter<commit_after>from django.contrib import admin
from .models import StockState
class StockStateAdmin(admin.ModelAdmin):
model = StockState
list_display = [
'section_id',
'case_id',
'product_id',
'stock_on_hand',
'daily_consumption',
'last_modified_date'
]
list_filter = [
'section_id',
'last_modified_date'
]
search_fields = ["case_id", "product_id"]
admin.site.register(StockState, StockStateAdmin)
|
bae4f53d126ebfe61ba0e10aa8af6eb573931ec3
|
mysite/data_2015_fall/models.py
|
mysite/data_2015_fall/models.py
|
from neomodel import (StructuredNode, StringProperty, IntegerProperty, ArrayProperty,
RelationshipTo, RelationshipFrom)
# Create your models here.
class Article(StructuredNode):
title = StringProperty()
journal = StringProperty()
year = IntegerProperty()
volume = IntegerProperty()
authors = RelationshipFrom('Author', 'AUTHORED')
class Author(StructuredNode):
name = StringProperty()
articles = RelationshipTo('Article', 'AUTHORED')
def __str__(self):
# Call str function here since sometimes the object might not
# found in the database...
# Python sucks
return "<Author: " + str(self.name) + ">"
def __repr__(self):
return "<Author: " + repr(self.name) + ">"
def __hash__(self):
"""
We use name of the author as the hash value
"""
return hash(self.name)
def __cmp__(self, other):
return cmp(self.name, other.name)
def toDict(self):
return {
"name": self.name
}
|
from neomodel import (StructuredNode, StringProperty, IntegerProperty, ArrayProperty,
RelationshipTo, RelationshipFrom)
# Create your models here.
class Article(StructuredNode):
title = StringProperty()
journal = StringProperty()
year = IntegerProperty()
volume = StringProperty()
authors = RelationshipFrom('Author', 'AUTHORED')
class Author(StructuredNode):
name = StringProperty()
articles = RelationshipTo('Article', 'AUTHORED')
def __str__(self):
# Call str function here since sometimes the object might not
# found in the database...
# Python sucks
return "<Author: " + str(self.name) + ">"
def __repr__(self):
return "<Author: " + repr(self.name) + ">"
def __hash__(self):
"""
We use name of the author as the hash value
"""
return hash(self.name)
def __cmp__(self, other):
return cmp(self.name, other.name)
def toDict(self):
return {
"name": self.name
}
|
Fix bug in class Article, volume should be string type
|
Fix bug in class Article, volume should be string type
|
Python
|
unlicense
|
cmusv-sc/DIWD-Team4-Wei-Lin-Tsai,cmusv-sc/DIWD-Team4-Wei-Lin-Tsai,cmusv-sc/DIWD-Team4-Wei-Lin-Tsai,cmusv-sc/DIWD-Team4-Wei-Lin-Tsai
|
from neomodel import (StructuredNode, StringProperty, IntegerProperty, ArrayProperty,
RelationshipTo, RelationshipFrom)
# Create your models here.
class Article(StructuredNode):
title = StringProperty()
journal = StringProperty()
year = IntegerProperty()
volume = IntegerProperty()
authors = RelationshipFrom('Author', 'AUTHORED')
class Author(StructuredNode):
name = StringProperty()
articles = RelationshipTo('Article', 'AUTHORED')
def __str__(self):
# Call str function here since sometimes the object might not
# found in the database...
# Python sucks
return "<Author: " + str(self.name) + ">"
def __repr__(self):
return "<Author: " + repr(self.name) + ">"
def __hash__(self):
"""
We use name of the author as the hash value
"""
return hash(self.name)
def __cmp__(self, other):
return cmp(self.name, other.name)
def toDict(self):
return {
"name": self.name
}
Fix bug in class Article, volume should be string type
|
from neomodel import (StructuredNode, StringProperty, IntegerProperty, ArrayProperty,
RelationshipTo, RelationshipFrom)
# Create your models here.
class Article(StructuredNode):
title = StringProperty()
journal = StringProperty()
year = IntegerProperty()
volume = StringProperty()
authors = RelationshipFrom('Author', 'AUTHORED')
class Author(StructuredNode):
name = StringProperty()
articles = RelationshipTo('Article', 'AUTHORED')
def __str__(self):
# Call str function here since sometimes the object might not
# found in the database...
# Python sucks
return "<Author: " + str(self.name) + ">"
def __repr__(self):
return "<Author: " + repr(self.name) + ">"
def __hash__(self):
"""
We use name of the author as the hash value
"""
return hash(self.name)
def __cmp__(self, other):
return cmp(self.name, other.name)
def toDict(self):
return {
"name": self.name
}
|
<commit_before>from neomodel import (StructuredNode, StringProperty, IntegerProperty, ArrayProperty,
RelationshipTo, RelationshipFrom)
# Create your models here.
class Article(StructuredNode):
title = StringProperty()
journal = StringProperty()
year = IntegerProperty()
volume = IntegerProperty()
authors = RelationshipFrom('Author', 'AUTHORED')
class Author(StructuredNode):
name = StringProperty()
articles = RelationshipTo('Article', 'AUTHORED')
def __str__(self):
# Call str function here since sometimes the object might not
# found in the database...
# Python sucks
return "<Author: " + str(self.name) + ">"
def __repr__(self):
return "<Author: " + repr(self.name) + ">"
def __hash__(self):
"""
We use name of the author as the hash value
"""
return hash(self.name)
def __cmp__(self, other):
return cmp(self.name, other.name)
def toDict(self):
return {
"name": self.name
}
<commit_msg>Fix bug in class Article, volume should be string type<commit_after>
|
from neomodel import (StructuredNode, StringProperty, IntegerProperty, ArrayProperty,
RelationshipTo, RelationshipFrom)
# Create your models here.
class Article(StructuredNode):
title = StringProperty()
journal = StringProperty()
year = IntegerProperty()
volume = StringProperty()
authors = RelationshipFrom('Author', 'AUTHORED')
class Author(StructuredNode):
name = StringProperty()
articles = RelationshipTo('Article', 'AUTHORED')
def __str__(self):
# Call str function here since sometimes the object might not
# found in the database...
# Python sucks
return "<Author: " + str(self.name) + ">"
def __repr__(self):
return "<Author: " + repr(self.name) + ">"
def __hash__(self):
"""
We use name of the author as the hash value
"""
return hash(self.name)
def __cmp__(self, other):
return cmp(self.name, other.name)
def toDict(self):
return {
"name": self.name
}
|
from neomodel import (StructuredNode, StringProperty, IntegerProperty, ArrayProperty,
RelationshipTo, RelationshipFrom)
# Create your models here.
class Article(StructuredNode):
title = StringProperty()
journal = StringProperty()
year = IntegerProperty()
volume = IntegerProperty()
authors = RelationshipFrom('Author', 'AUTHORED')
class Author(StructuredNode):
name = StringProperty()
articles = RelationshipTo('Article', 'AUTHORED')
def __str__(self):
# Call str function here since sometimes the object might not
# found in the database...
# Python sucks
return "<Author: " + str(self.name) + ">"
def __repr__(self):
return "<Author: " + repr(self.name) + ">"
def __hash__(self):
"""
We use name of the author as the hash value
"""
return hash(self.name)
def __cmp__(self, other):
return cmp(self.name, other.name)
def toDict(self):
return {
"name": self.name
}
Fix bug in class Article, volume should be string typefrom neomodel import (StructuredNode, StringProperty, IntegerProperty, ArrayProperty,
RelationshipTo, RelationshipFrom)
# Create your models here.
class Article(StructuredNode):
title = StringProperty()
journal = StringProperty()
year = IntegerProperty()
volume = StringProperty()
authors = RelationshipFrom('Author', 'AUTHORED')
class Author(StructuredNode):
name = StringProperty()
articles = RelationshipTo('Article', 'AUTHORED')
def __str__(self):
# Call str function here since sometimes the object might not
# found in the database...
# Python sucks
return "<Author: " + str(self.name) + ">"
def __repr__(self):
return "<Author: " + repr(self.name) + ">"
def __hash__(self):
"""
We use name of the author as the hash value
"""
return hash(self.name)
def __cmp__(self, other):
return cmp(self.name, other.name)
def toDict(self):
return {
"name": self.name
}
|
<commit_before>from neomodel import (StructuredNode, StringProperty, IntegerProperty, ArrayProperty,
RelationshipTo, RelationshipFrom)
# Create your models here.
class Article(StructuredNode):
title = StringProperty()
journal = StringProperty()
year = IntegerProperty()
volume = IntegerProperty()
authors = RelationshipFrom('Author', 'AUTHORED')
class Author(StructuredNode):
name = StringProperty()
articles = RelationshipTo('Article', 'AUTHORED')
def __str__(self):
# Call str function here since sometimes the object might not
# found in the database...
# Python sucks
return "<Author: " + str(self.name) + ">"
def __repr__(self):
return "<Author: " + repr(self.name) + ">"
def __hash__(self):
"""
We use name of the author as the hash value
"""
return hash(self.name)
def __cmp__(self, other):
return cmp(self.name, other.name)
def toDict(self):
return {
"name": self.name
}
<commit_msg>Fix bug in class Article, volume should be string type<commit_after>from neomodel import (StructuredNode, StringProperty, IntegerProperty, ArrayProperty,
RelationshipTo, RelationshipFrom)
# Create your models here.
class Article(StructuredNode):
title = StringProperty()
journal = StringProperty()
year = IntegerProperty()
volume = StringProperty()
authors = RelationshipFrom('Author', 'AUTHORED')
class Author(StructuredNode):
name = StringProperty()
articles = RelationshipTo('Article', 'AUTHORED')
def __str__(self):
# Call str function here since sometimes the object might not
# found in the database...
# Python sucks
return "<Author: " + str(self.name) + ">"
def __repr__(self):
return "<Author: " + repr(self.name) + ">"
def __hash__(self):
"""
We use name of the author as the hash value
"""
return hash(self.name)
def __cmp__(self, other):
return cmp(self.name, other.name)
def toDict(self):
return {
"name": self.name
}
|
2c2295ad42fe0c340aa8ff3046bbfe16ac784b66
|
csunplugged/config/__init__.py
|
csunplugged/config/__init__.py
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.4"
__version_english__ = "2.0 Alpha 4"
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.5"
__version_english__ = "2.0 Alpha 5"
|
Increment version number to 2.0.0-alpha.5
|
Increment version number to 2.0.0-alpha.5
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.4"
__version_english__ = "2.0 Alpha 4"
Increment version number to 2.0.0-alpha.5
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.5"
__version_english__ = "2.0 Alpha 5"
|
<commit_before>"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.4"
__version_english__ = "2.0 Alpha 4"
<commit_msg>Increment version number to 2.0.0-alpha.5<commit_after>
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.5"
__version_english__ = "2.0 Alpha 5"
|
"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.4"
__version_english__ = "2.0 Alpha 4"
Increment version number to 2.0.0-alpha.5"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.5"
__version_english__ = "2.0 Alpha 5"
|
<commit_before>"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.4"
__version_english__ = "2.0 Alpha 4"
<commit_msg>Increment version number to 2.0.0-alpha.5<commit_after>"""Module for Django system configuration."""
__version__ = "2.0.0-alpha.5"
__version_english__ = "2.0 Alpha 5"
|
1e73195e33c384605072e36ac1551bd6d67ba7cb
|
QGL/BasicSequences/__init__.py
|
QGL/BasicSequences/__init__.py
|
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
|
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
from itertools import product
import operator
from ..PulsePrimitives import Id, X
def create_cal_seqs(qubits, numCals):
"""
Helper function to create a set of calibration sequences.
"""
calSet = [Id, X]
calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))]
return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
|
Add a helper function to create calibration sequences.
|
Add a helper function to create calibration sequences.
|
Python
|
apache-2.0
|
Plourde-Research-Lab/PyQLab,calebjordan/PyQLab,BBN-Q/PyQLab,rmcgurrin/PyQLab
|
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBTAdd a helper function to create calibration sequences.
|
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
from itertools import product
import operator
from ..PulsePrimitives import Id, X
def create_cal_seqs(qubits, numCals):
"""
Helper function to create a set of calibration sequences.
"""
calSet = [Id, X]
calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))]
return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
|
<commit_before>from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT<commit_msg>Add a helper function to create calibration sequences.<commit_after>
|
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
from itertools import product
import operator
from ..PulsePrimitives import Id, X
def create_cal_seqs(qubits, numCals):
"""
Helper function to create a set of calibration sequences.
"""
calSet = [Id, X]
calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))]
return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
|
from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBTAdd a helper function to create calibration sequences.from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
from itertools import product
import operator
from ..PulsePrimitives import Id, X
def create_cal_seqs(qubits, numCals):
"""
Helper function to create a set of calibration sequences.
"""
calSet = [Id, X]
calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))]
return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
|
<commit_before>from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT<commit_msg>Add a helper function to create calibration sequences.<commit_after>from RabiAmp import RabiAmp
from Ramsey import Ramsey
from FlipFlop import FlipFlop
from SPAM import SPAM
from RB import SingleQubitRB, SingleQubitRB_AC, SingleQubitRBT
from itertools import product
import operator
from ..PulsePrimitives import Id, X
def create_cal_seqs(qubits, numCals):
"""
Helper function to create a set of calibration sequences.
"""
calSet = [Id, X]
calSeqs = [reduce(operator.mul, [p(q) for p,q in zip(pulseSet, qubits)]) for pulseSet in product(calSet, repeat=len(qubits))]
return reduce(operator.add, [[[seq]]*numCals for seq in calSeqs])
|
519aff5c44c6801c44981b059654e598c6d8db49
|
second/blog/models.py
|
second/blog/models.py
|
from __future__ import unicode_literals
from django.db import models
# Create your models here.
|
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Post(models.Model):
author = models.ForeignKey('auth.User')
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
|
Create Post model in model.py
|
Create Post model in model.py
|
Python
|
mit
|
ugaliguy/Django-Tutorial-Projects,ugaliguy/Django-Tutorial-Projects
|
from __future__ import unicode_literals
from django.db import models
# Create your models here.
Create Post model in model.py
|
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Post(models.Model):
author = models.ForeignKey('auth.User')
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
|
<commit_before>from __future__ import unicode_literals
from django.db import models
# Create your models here.
<commit_msg>Create Post model in model.py<commit_after>
|
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Post(models.Model):
author = models.ForeignKey('auth.User')
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
|
from __future__ import unicode_literals
from django.db import models
# Create your models here.
Create Post model in model.pyfrom __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Post(models.Model):
author = models.ForeignKey('auth.User')
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
|
<commit_before>from __future__ import unicode_literals
from django.db import models
# Create your models here.
<commit_msg>Create Post model in model.py<commit_after>from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Post(models.Model):
author = models.ForeignKey('auth.User')
title = models.CharField(max_length=200)
text = models.TextField()
created_date = models.DateTimeField(default=timezone.now)
published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
|
f59adf7887d26c09257b16438a2d920861be3f33
|
eventtools/tests/_inject_app.py
|
eventtools/tests/_inject_app.py
|
from django.test import TestCase
from django.conf import settings
from django.db.models.loading import load_app
from django.core.management import call_command
from _fixture import fixture
APP_NAME = 'eventtools.tests.eventtools_testapp'
class TestCaseWithApp(TestCase):
"""Make sure to call super(..).setUp and tearDown on subclasses"""
def setUp(self):
self.__class__.__module__ = self.__class__.__name__
self.old_INSTALLED_APPS = settings.INSTALLED_APPS
settings.INSTALLED_APPS += [APP_NAME]
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = '%s.urls' % APP_NAME
load_app(APP_NAME)
call_command('flush', verbosity=0, interactive=False)
call_command('syncdb', verbosity=0, interactive=False)
self.ae = self.assertEqual
fixture(self)
def tearDown(self):
settings.INSTALLED_APPS = self.old_INSTALLED_APPS
settings.ROOT_URLCONF = self._old_root_urlconf
|
from django.db.models.loading import load_app
from django.conf import settings
from django.core.management import call_command
from django.template.loaders import app_directories
from django.template import loader
from django.test import TestCase
from _fixture import fixture
APP_NAME = 'eventtools.tests.eventtools_testapp'
class TestCaseWithApp(TestCase):
"""Make sure to call super(..).setUp and tearDown on subclasses"""
def setUp(self):
self.__class__.__module__ = self.__class__.__name__
self.old_INSTALLED_APPS = settings.INSTALLED_APPS
settings.INSTALLED_APPS += [APP_NAME]
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = '%s.urls' % APP_NAME
load_app(APP_NAME)
call_command('flush', verbosity=0, interactive=False)
call_command('syncdb', verbosity=0, interactive=False)
self.ae = self.assertEqual
self._old_template_loaders = settings.TEMPLATE_LOADERS
loaders = list(settings.TEMPLATE_LOADERS)
try:
loaders.remove('django.template.loaders.filesystem.Loader')
settings.TEMPLATE_LOADERS = loaders
self._refresh_cache()
except ValueError:
pass
fixture(self)
def tearDown(self):
settings.INSTALLED_APPS = self.old_INSTALLED_APPS
settings.ROOT_URLCONF = self._old_root_urlconf
settings.TEMPLATE_LOADERS = self._old_template_loaders
self._refresh_cache()
def _refresh_cache(self):
reload(app_directories)
loader.template_source_loaders = None
|
Disable loading templates from project templates (use only the app ones). Makes all the views tests pass when ran as part of the suite of a larger project like NFSA. (Eventually, eventtools should just use testtools, this functionality is built in there)
|
Disable loading templates from project templates (use only the app ones). Makes all the views tests pass when ran as part of the suite of a larger project like NFSA. (Eventually, eventtools should just use testtools, this functionality is built in there)
|
Python
|
bsd-3-clause
|
ixc/glamkit-eventtools,ixc/glamkit-eventtools
|
from django.test import TestCase
from django.conf import settings
from django.db.models.loading import load_app
from django.core.management import call_command
from _fixture import fixture
APP_NAME = 'eventtools.tests.eventtools_testapp'
class TestCaseWithApp(TestCase):
"""Make sure to call super(..).setUp and tearDown on subclasses"""
def setUp(self):
self.__class__.__module__ = self.__class__.__name__
self.old_INSTALLED_APPS = settings.INSTALLED_APPS
settings.INSTALLED_APPS += [APP_NAME]
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = '%s.urls' % APP_NAME
load_app(APP_NAME)
call_command('flush', verbosity=0, interactive=False)
call_command('syncdb', verbosity=0, interactive=False)
self.ae = self.assertEqual
fixture(self)
def tearDown(self):
settings.INSTALLED_APPS = self.old_INSTALLED_APPS
settings.ROOT_URLCONF = self._old_root_urlconfDisable loading templates from project templates (use only the app ones). Makes all the views tests pass when ran as part of the suite of a larger project like NFSA. (Eventually, eventtools should just use testtools, this functionality is built in there)
|
from django.db.models.loading import load_app
from django.conf import settings
from django.core.management import call_command
from django.template.loaders import app_directories
from django.template import loader
from django.test import TestCase
from _fixture import fixture
APP_NAME = 'eventtools.tests.eventtools_testapp'
class TestCaseWithApp(TestCase):
"""Make sure to call super(..).setUp and tearDown on subclasses"""
def setUp(self):
self.__class__.__module__ = self.__class__.__name__
self.old_INSTALLED_APPS = settings.INSTALLED_APPS
settings.INSTALLED_APPS += [APP_NAME]
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = '%s.urls' % APP_NAME
load_app(APP_NAME)
call_command('flush', verbosity=0, interactive=False)
call_command('syncdb', verbosity=0, interactive=False)
self.ae = self.assertEqual
self._old_template_loaders = settings.TEMPLATE_LOADERS
loaders = list(settings.TEMPLATE_LOADERS)
try:
loaders.remove('django.template.loaders.filesystem.Loader')
settings.TEMPLATE_LOADERS = loaders
self._refresh_cache()
except ValueError:
pass
fixture(self)
def tearDown(self):
settings.INSTALLED_APPS = self.old_INSTALLED_APPS
settings.ROOT_URLCONF = self._old_root_urlconf
settings.TEMPLATE_LOADERS = self._old_template_loaders
self._refresh_cache()
def _refresh_cache(self):
reload(app_directories)
loader.template_source_loaders = None
|
<commit_before>from django.test import TestCase
from django.conf import settings
from django.db.models.loading import load_app
from django.core.management import call_command
from _fixture import fixture
APP_NAME = 'eventtools.tests.eventtools_testapp'
class TestCaseWithApp(TestCase):
"""Make sure to call super(..).setUp and tearDown on subclasses"""
def setUp(self):
self.__class__.__module__ = self.__class__.__name__
self.old_INSTALLED_APPS = settings.INSTALLED_APPS
settings.INSTALLED_APPS += [APP_NAME]
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = '%s.urls' % APP_NAME
load_app(APP_NAME)
call_command('flush', verbosity=0, interactive=False)
call_command('syncdb', verbosity=0, interactive=False)
self.ae = self.assertEqual
fixture(self)
def tearDown(self):
settings.INSTALLED_APPS = self.old_INSTALLED_APPS
settings.ROOT_URLCONF = self._old_root_urlconf<commit_msg>Disable loading templates from project templates (use only the app ones). Makes all the views tests pass when ran as part of the suite of a larger project like NFSA. (Eventually, eventtools should just use testtools, this functionality is built in there)<commit_after>
|
from django.db.models.loading import load_app
from django.conf import settings
from django.core.management import call_command
from django.template.loaders import app_directories
from django.template import loader
from django.test import TestCase
from _fixture import fixture
APP_NAME = 'eventtools.tests.eventtools_testapp'
class TestCaseWithApp(TestCase):
"""Make sure to call super(..).setUp and tearDown on subclasses"""
def setUp(self):
self.__class__.__module__ = self.__class__.__name__
self.old_INSTALLED_APPS = settings.INSTALLED_APPS
settings.INSTALLED_APPS += [APP_NAME]
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = '%s.urls' % APP_NAME
load_app(APP_NAME)
call_command('flush', verbosity=0, interactive=False)
call_command('syncdb', verbosity=0, interactive=False)
self.ae = self.assertEqual
self._old_template_loaders = settings.TEMPLATE_LOADERS
loaders = list(settings.TEMPLATE_LOADERS)
try:
loaders.remove('django.template.loaders.filesystem.Loader')
settings.TEMPLATE_LOADERS = loaders
self._refresh_cache()
except ValueError:
pass
fixture(self)
def tearDown(self):
settings.INSTALLED_APPS = self.old_INSTALLED_APPS
settings.ROOT_URLCONF = self._old_root_urlconf
settings.TEMPLATE_LOADERS = self._old_template_loaders
self._refresh_cache()
def _refresh_cache(self):
reload(app_directories)
loader.template_source_loaders = None
|
from django.test import TestCase
from django.conf import settings
from django.db.models.loading import load_app
from django.core.management import call_command
from _fixture import fixture
APP_NAME = 'eventtools.tests.eventtools_testapp'
class TestCaseWithApp(TestCase):
"""Make sure to call super(..).setUp and tearDown on subclasses"""
def setUp(self):
self.__class__.__module__ = self.__class__.__name__
self.old_INSTALLED_APPS = settings.INSTALLED_APPS
settings.INSTALLED_APPS += [APP_NAME]
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = '%s.urls' % APP_NAME
load_app(APP_NAME)
call_command('flush', verbosity=0, interactive=False)
call_command('syncdb', verbosity=0, interactive=False)
self.ae = self.assertEqual
fixture(self)
def tearDown(self):
settings.INSTALLED_APPS = self.old_INSTALLED_APPS
settings.ROOT_URLCONF = self._old_root_urlconfDisable loading templates from project templates (use only the app ones). Makes all the views tests pass when ran as part of the suite of a larger project like NFSA. (Eventually, eventtools should just use testtools, this functionality is built in there)from django.db.models.loading import load_app
from django.conf import settings
from django.core.management import call_command
from django.template.loaders import app_directories
from django.template import loader
from django.test import TestCase
from _fixture import fixture
APP_NAME = 'eventtools.tests.eventtools_testapp'
class TestCaseWithApp(TestCase):
"""Make sure to call super(..).setUp and tearDown on subclasses"""
def setUp(self):
self.__class__.__module__ = self.__class__.__name__
self.old_INSTALLED_APPS = settings.INSTALLED_APPS
settings.INSTALLED_APPS += [APP_NAME]
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = '%s.urls' % APP_NAME
load_app(APP_NAME)
call_command('flush', verbosity=0, interactive=False)
call_command('syncdb', verbosity=0, interactive=False)
self.ae = self.assertEqual
self._old_template_loaders = settings.TEMPLATE_LOADERS
loaders = list(settings.TEMPLATE_LOADERS)
try:
loaders.remove('django.template.loaders.filesystem.Loader')
settings.TEMPLATE_LOADERS = loaders
self._refresh_cache()
except ValueError:
pass
fixture(self)
def tearDown(self):
settings.INSTALLED_APPS = self.old_INSTALLED_APPS
settings.ROOT_URLCONF = self._old_root_urlconf
settings.TEMPLATE_LOADERS = self._old_template_loaders
self._refresh_cache()
def _refresh_cache(self):
reload(app_directories)
loader.template_source_loaders = None
|
<commit_before>from django.test import TestCase
from django.conf import settings
from django.db.models.loading import load_app
from django.core.management import call_command
from _fixture import fixture
APP_NAME = 'eventtools.tests.eventtools_testapp'
class TestCaseWithApp(TestCase):
"""Make sure to call super(..).setUp and tearDown on subclasses"""
def setUp(self):
self.__class__.__module__ = self.__class__.__name__
self.old_INSTALLED_APPS = settings.INSTALLED_APPS
settings.INSTALLED_APPS += [APP_NAME]
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = '%s.urls' % APP_NAME
load_app(APP_NAME)
call_command('flush', verbosity=0, interactive=False)
call_command('syncdb', verbosity=0, interactive=False)
self.ae = self.assertEqual
fixture(self)
def tearDown(self):
settings.INSTALLED_APPS = self.old_INSTALLED_APPS
settings.ROOT_URLCONF = self._old_root_urlconf<commit_msg>Disable loading templates from project templates (use only the app ones). Makes all the views tests pass when ran as part of the suite of a larger project like NFSA. (Eventually, eventtools should just use testtools, this functionality is built in there)<commit_after>from django.db.models.loading import load_app
from django.conf import settings
from django.core.management import call_command
from django.template.loaders import app_directories
from django.template import loader
from django.test import TestCase
from _fixture import fixture
APP_NAME = 'eventtools.tests.eventtools_testapp'
class TestCaseWithApp(TestCase):
"""Make sure to call super(..).setUp and tearDown on subclasses"""
def setUp(self):
self.__class__.__module__ = self.__class__.__name__
self.old_INSTALLED_APPS = settings.INSTALLED_APPS
settings.INSTALLED_APPS += [APP_NAME]
self._old_root_urlconf = settings.ROOT_URLCONF
settings.ROOT_URLCONF = '%s.urls' % APP_NAME
load_app(APP_NAME)
call_command('flush', verbosity=0, interactive=False)
call_command('syncdb', verbosity=0, interactive=False)
self.ae = self.assertEqual
self._old_template_loaders = settings.TEMPLATE_LOADERS
loaders = list(settings.TEMPLATE_LOADERS)
try:
loaders.remove('django.template.loaders.filesystem.Loader')
settings.TEMPLATE_LOADERS = loaders
self._refresh_cache()
except ValueError:
pass
fixture(self)
def tearDown(self):
settings.INSTALLED_APPS = self.old_INSTALLED_APPS
settings.ROOT_URLCONF = self._old_root_urlconf
settings.TEMPLATE_LOADERS = self._old_template_loaders
self._refresh_cache()
def _refresh_cache(self):
reload(app_directories)
loader.template_source_loaders = None
|
155f53100148ffd09e9e0e0f1f9de073974ea97b
|
osgtest/tests/test_89_condor.py
|
osgtest/tests/test_89_condor.py
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_if(core.state['condor.started-service'] == False, 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_unless(core.state['condor.started-service'], 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
Use skip_ok_unless instead of a comparison against 'False'
|
Use skip_ok_unless instead of a comparison against 'False'
|
Python
|
apache-2.0
|
efajardo/osg-test,efajardo/osg-test
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_if(core.state['condor.started-service'] == False, 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
Use skip_ok_unless instead of a comparison against 'False'
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_unless(core.state['condor.started-service'], 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
<commit_before>import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_if(core.state['condor.started-service'] == False, 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
<commit_msg>Use skip_ok_unless instead of a comparison against 'False'<commit_after>
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_unless(core.state['condor.started-service'], 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_if(core.state['condor.started-service'] == False, 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
Use skip_ok_unless instead of a comparison against 'False'import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_unless(core.state['condor.started-service'], 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
<commit_before>import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_if(core.state['condor.started-service'] == False, 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
<commit_msg>Use skip_ok_unless instead of a comparison against 'False'<commit_after>import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.service as service
import osgtest.library.osgunittest as osgunittest
class TestStopCondor(osgunittest.OSGTestCase):
def test_01_stop_condor(self):
core.skip_ok_unless_installed('condor')
self.skip_ok_unless(core.state['condor.started-service'], 'did not start server')
service.check_stop('condor')
files.restore(core.config['condor.personal_condor'], 'condor')
core.state['condor.running-service'] = False
|
c26a2056273fca15f0505b448709e056898ffd7d
|
src/targetsomstandalone.py
|
src/targetsomstandalone.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from som.vm.universe import main, Exit
from rpython.rlib import jit
#from rpython.rlib.debug import debug_start, debug_stop, debug_print
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit, e:
return e.code
return 1
# _____ Define and setup target ___
def target(driver, args):
driver.exe_name = 'som'
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from som.vm.universe import main, Exit
from rpython.rlib import jit
#from rpython.rlib.debug import debug_start, debug_stop, debug_print
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit, e:
return e.code
return 1
# _____ Define and setup target ___
def target(driver, args):
if driver.config.translation.jit:
driver.exe_name = 'RPySOM-jit'
else:
driver.exe_name = 'RPySOM-no-jit'
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
|
Change binary name, and encode whether it uses a JIT compiler or not
|
Change binary name, and encode whether it uses a JIT compiler or not
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
|
Python
|
mit
|
smarr/PySOM,smarr/PySOM,SOM-st/RTruffleSOM,SOM-st/RTruffleSOM,smarr/RTruffleSOM,smarr/RTruffleSOM,SOM-st/PySOM,SOM-st/RPySOM,SOM-st/PySOM,SOM-st/RPySOM
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from som.vm.universe import main, Exit
from rpython.rlib import jit
#from rpython.rlib.debug import debug_start, debug_stop, debug_print
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit, e:
return e.code
return 1
# _____ Define and setup target ___
def target(driver, args):
driver.exe_name = 'som'
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
Change binary name, and encode whether it uses a JIT compiler or not
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from som.vm.universe import main, Exit
from rpython.rlib import jit
#from rpython.rlib.debug import debug_start, debug_stop, debug_print
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit, e:
return e.code
return 1
# _____ Define and setup target ___
def target(driver, args):
if driver.config.translation.jit:
driver.exe_name = 'RPySOM-jit'
else:
driver.exe_name = 'RPySOM-no-jit'
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from som.vm.universe import main, Exit
from rpython.rlib import jit
#from rpython.rlib.debug import debug_start, debug_stop, debug_print
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit, e:
return e.code
return 1
# _____ Define and setup target ___
def target(driver, args):
driver.exe_name = 'som'
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
<commit_msg>Change binary name, and encode whether it uses a JIT compiler or not
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de><commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from som.vm.universe import main, Exit
from rpython.rlib import jit
#from rpython.rlib.debug import debug_start, debug_stop, debug_print
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit, e:
return e.code
return 1
# _____ Define and setup target ___
def target(driver, args):
if driver.config.translation.jit:
driver.exe_name = 'RPySOM-jit'
else:
driver.exe_name = 'RPySOM-no-jit'
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from som.vm.universe import main, Exit
from rpython.rlib import jit
#from rpython.rlib.debug import debug_start, debug_stop, debug_print
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit, e:
return e.code
return 1
# _____ Define and setup target ___
def target(driver, args):
driver.exe_name = 'som'
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
Change binary name, and encode whether it uses a JIT compiler or not
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from som.vm.universe import main, Exit
from rpython.rlib import jit
#from rpython.rlib.debug import debug_start, debug_stop, debug_print
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit, e:
return e.code
return 1
# _____ Define and setup target ___
def target(driver, args):
if driver.config.translation.jit:
driver.exe_name = 'RPySOM-jit'
else:
driver.exe_name = 'RPySOM-no-jit'
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from som.vm.universe import main, Exit
from rpython.rlib import jit
#from rpython.rlib.debug import debug_start, debug_stop, debug_print
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit, e:
return e.code
return 1
# _____ Define and setup target ___
def target(driver, args):
driver.exe_name = 'som'
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
<commit_msg>Change binary name, and encode whether it uses a JIT compiler or not
Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de><commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from som.vm.universe import main, Exit
from rpython.rlib import jit
#from rpython.rlib.debug import debug_start, debug_stop, debug_print
# __________ Entry points __________
def entry_point(argv):
try:
main(argv)
except Exit, e:
return e.code
return 1
# _____ Define and setup target ___
def target(driver, args):
if driver.config.translation.jit:
driver.exe_name = 'RPySOM-jit'
else:
driver.exe_name = 'RPySOM-no-jit'
return entry_point, None
def jitpolicy(driver):
from rpython.jit.codewriter.policy import JitPolicy
return JitPolicy()
if __name__ == '__main__':
from rpython.translator.driver import TranslationDriver
f, _ = target(TranslationDriver(), sys.argv)
sys.exit(f(sys.argv))
|
73373c893c1fe8412b5a3fecc83767988b1bccdf
|
genshi/__init__.py
|
genshi/__init__.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""This package provides various means for generating and processing web markup
(XML or HTML).
The design is centered around the concept of streams of markup events (similar
in concept to SAX parsing events) which can be processed in a uniform manner
independently of where or how they are produced.
"""
__docformat__ = 'restructuredtext en'
try:
from pkg_resources import get_distribution, ResolutionError
try:
__version__ = get_distribution('Genshi').version
except ResolutionError:
__version__ = None # unknown
except ImportError:
__version__ = None # unknown
from genshi.core import *
from genshi.input import ParseError, XML, HTML
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""This package provides various means for generating and processing web markup
(XML or HTML).
The design is centered around the concept of streams of markup events (similar
in concept to SAX parsing events) which can be processed in a uniform manner
independently of where or how they are produced.
"""
__docformat__ = 'restructuredtext en'
__version__ = '0.6'
from genshi.core import *
from genshi.input import ParseError, XML, HTML
|
Remove pkg_resources import from top-level package, will just need to remember updating the version in two places.
|
Remove pkg_resources import from top-level package, will just need to remember updating the version in two places.
|
Python
|
bsd-3-clause
|
hodgestar/genshi,hodgestar/genshi,hodgestar/genshi,hodgestar/genshi
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""This package provides various means for generating and processing web markup
(XML or HTML).
The design is centered around the concept of streams of markup events (similar
in concept to SAX parsing events) which can be processed in a uniform manner
independently of where or how they are produced.
"""
__docformat__ = 'restructuredtext en'
try:
from pkg_resources import get_distribution, ResolutionError
try:
__version__ = get_distribution('Genshi').version
except ResolutionError:
__version__ = None # unknown
except ImportError:
__version__ = None # unknown
from genshi.core import *
from genshi.input import ParseError, XML, HTML
Remove pkg_resources import from top-level package, will just need to remember updating the version in two places.
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""This package provides various means for generating and processing web markup
(XML or HTML).
The design is centered around the concept of streams of markup events (similar
in concept to SAX parsing events) which can be processed in a uniform manner
independently of where or how they are produced.
"""
__docformat__ = 'restructuredtext en'
__version__ = '0.6'
from genshi.core import *
from genshi.input import ParseError, XML, HTML
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""This package provides various means for generating and processing web markup
(XML or HTML).
The design is centered around the concept of streams of markup events (similar
in concept to SAX parsing events) which can be processed in a uniform manner
independently of where or how they are produced.
"""
__docformat__ = 'restructuredtext en'
try:
from pkg_resources import get_distribution, ResolutionError
try:
__version__ = get_distribution('Genshi').version
except ResolutionError:
__version__ = None # unknown
except ImportError:
__version__ = None # unknown
from genshi.core import *
from genshi.input import ParseError, XML, HTML
<commit_msg>Remove pkg_resources import from top-level package, will just need to remember updating the version in two places.<commit_after>
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""This package provides various means for generating and processing web markup
(XML or HTML).
The design is centered around the concept of streams of markup events (similar
in concept to SAX parsing events) which can be processed in a uniform manner
independently of where or how they are produced.
"""
__docformat__ = 'restructuredtext en'
__version__ = '0.6'
from genshi.core import *
from genshi.input import ParseError, XML, HTML
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""This package provides various means for generating and processing web markup
(XML or HTML).
The design is centered around the concept of streams of markup events (similar
in concept to SAX parsing events) which can be processed in a uniform manner
independently of where or how they are produced.
"""
__docformat__ = 'restructuredtext en'
try:
from pkg_resources import get_distribution, ResolutionError
try:
__version__ = get_distribution('Genshi').version
except ResolutionError:
__version__ = None # unknown
except ImportError:
__version__ = None # unknown
from genshi.core import *
from genshi.input import ParseError, XML, HTML
Remove pkg_resources import from top-level package, will just need to remember updating the version in two places.# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""This package provides various means for generating and processing web markup
(XML or HTML).
The design is centered around the concept of streams of markup events (similar
in concept to SAX parsing events) which can be processed in a uniform manner
independently of where or how they are produced.
"""
__docformat__ = 'restructuredtext en'
__version__ = '0.6'
from genshi.core import *
from genshi.input import ParseError, XML, HTML
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""This package provides various means for generating and processing web markup
(XML or HTML).
The design is centered around the concept of streams of markup events (similar
in concept to SAX parsing events) which can be processed in a uniform manner
independently of where or how they are produced.
"""
__docformat__ = 'restructuredtext en'
try:
from pkg_resources import get_distribution, ResolutionError
try:
__version__ = get_distribution('Genshi').version
except ResolutionError:
__version__ = None # unknown
except ImportError:
__version__ = None # unknown
from genshi.core import *
from genshi.input import ParseError, XML, HTML
<commit_msg>Remove pkg_resources import from top-level package, will just need to remember updating the version in two places.<commit_after># -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2008 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://genshi.edgewall.org/wiki/License.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://genshi.edgewall.org/log/.
"""This package provides various means for generating and processing web markup
(XML or HTML).
The design is centered around the concept of streams of markup events (similar
in concept to SAX parsing events) which can be processed in a uniform manner
independently of where or how they are produced.
"""
__docformat__ = 'restructuredtext en'
__version__ = '0.6'
from genshi.core import *
from genshi.input import ParseError, XML, HTML
|
1edf69ac029bf8e35cd897fa123ad4e0943d6bc9
|
src/wikicurses/__init__.py
|
src/wikicurses/__init__.py
|
from enum import Enum
class BitEnum(int, Enum):
def __new__(cls, *args):
value = 1 << len(cls.__members__)
return int.__new__(cls, value)
formats = BitEnum("formats", "i b blockquote")
|
from enum import IntEnum
class formats(IntEnum):
i, b, blockquote = (1<<i for i in range(3))
|
Remove BitEnum class, use IntEnum
|
Remove BitEnum class, use IntEnum
|
Python
|
mit
|
ids1024/wikicurses
|
from enum import Enum
class BitEnum(int, Enum):
def __new__(cls, *args):
value = 1 << len(cls.__members__)
return int.__new__(cls, value)
formats = BitEnum("formats", "i b blockquote")
Remove BitEnum class, use IntEnum
|
from enum import IntEnum
class formats(IntEnum):
i, b, blockquote = (1<<i for i in range(3))
|
<commit_before>from enum import Enum
class BitEnum(int, Enum):
def __new__(cls, *args):
value = 1 << len(cls.__members__)
return int.__new__(cls, value)
formats = BitEnum("formats", "i b blockquote")
<commit_msg>Remove BitEnum class, use IntEnum<commit_after>
|
from enum import IntEnum
class formats(IntEnum):
i, b, blockquote = (1<<i for i in range(3))
|
from enum import Enum
class BitEnum(int, Enum):
def __new__(cls, *args):
value = 1 << len(cls.__members__)
return int.__new__(cls, value)
formats = BitEnum("formats", "i b blockquote")
Remove BitEnum class, use IntEnumfrom enum import IntEnum
class formats(IntEnum):
i, b, blockquote = (1<<i for i in range(3))
|
<commit_before>from enum import Enum
class BitEnum(int, Enum):
def __new__(cls, *args):
value = 1 << len(cls.__members__)
return int.__new__(cls, value)
formats = BitEnum("formats", "i b blockquote")
<commit_msg>Remove BitEnum class, use IntEnum<commit_after>from enum import IntEnum
class formats(IntEnum):
i, b, blockquote = (1<<i for i in range(3))
|
96e0f2621dafd691e4560afe9b59df21aad3d2a8
|
taskwiki/cache.py
|
taskwiki/cache.py
|
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
return task
def __iter__(self):
# iterated_cache = {
while self.cache.keys():
for key in list(self.cache.keys()):
task = self.cache[key]
if all([t.line_number not in self.cache.keys()
for t in task.add_dependencies]):
del self.cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
import copy
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.uuid_cache = dict()
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
if task.uuid:
self.uuid_cache[task.uuid] = task
return task
def __iter__(self):
iterated_cache = copy.copy(self.cache)
while iterated_cache.keys():
for key in list(iterated_cache.keys()):
task = iterated_cache[key]
if all([t.line_number not in iterated_cache.keys()
for t in task.add_dependencies]):
del iterated_cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
Index tasks by uuid as well as line number
|
Cache: Index tasks by uuid as well as line number
|
Python
|
mit
|
phha/taskwiki,Spirotot/taskwiki
|
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
return task
def __iter__(self):
# iterated_cache = {
while self.cache.keys():
for key in list(self.cache.keys()):
task = self.cache[key]
if all([t.line_number not in self.cache.keys()
for t in task.add_dependencies]):
del self.cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
Cache: Index tasks by uuid as well as line number
|
import copy
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.uuid_cache = dict()
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
if task.uuid:
self.uuid_cache[task.uuid] = task
return task
def __iter__(self):
iterated_cache = copy.copy(self.cache)
while iterated_cache.keys():
for key in list(iterated_cache.keys()):
task = iterated_cache[key]
if all([t.line_number not in iterated_cache.keys()
for t in task.add_dependencies]):
del iterated_cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
<commit_before>import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
return task
def __iter__(self):
# iterated_cache = {
while self.cache.keys():
for key in list(self.cache.keys()):
task = self.cache[key]
if all([t.line_number not in self.cache.keys()
for t in task.add_dependencies]):
del self.cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
<commit_msg>Cache: Index tasks by uuid as well as line number<commit_after>
|
import copy
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.uuid_cache = dict()
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
if task.uuid:
self.uuid_cache[task.uuid] = task
return task
def __iter__(self):
iterated_cache = copy.copy(self.cache)
while iterated_cache.keys():
for key in list(iterated_cache.keys()):
task = iterated_cache[key]
if all([t.line_number not in iterated_cache.keys()
for t in task.add_dependencies]):
del iterated_cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
return task
def __iter__(self):
# iterated_cache = {
while self.cache.keys():
for key in list(self.cache.keys()):
task = self.cache[key]
if all([t.line_number not in self.cache.keys()
for t in task.add_dependencies]):
del self.cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
Cache: Index tasks by uuid as well as line numberimport copy
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.uuid_cache = dict()
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
if task.uuid:
self.uuid_cache[task.uuid] = task
return task
def __iter__(self):
iterated_cache = copy.copy(self.cache)
while iterated_cache.keys():
for key in list(iterated_cache.keys()):
task = iterated_cache[key]
if all([t.line_number not in iterated_cache.keys()
for t in task.add_dependencies]):
del iterated_cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
<commit_before>import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
return task
def __iter__(self):
# iterated_cache = {
while self.cache.keys():
for key in list(self.cache.keys()):
task = self.cache[key]
if all([t.line_number not in self.cache.keys()
for t in task.add_dependencies]):
del self.cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
<commit_msg>Cache: Index tasks by uuid as well as line number<commit_after>import copy
import vim
from taskwiki.task import VimwikiTask
class TaskCache(object):
"""
A cache that holds all the tasks in the given file and prevents
multiple redundant taskwarrior calls.
"""
def __init__(self, tw):
self.uuid_cache = dict()
self.cache = dict()
self.tw = tw
def __getitem__(self, key):
task = self.cache.get(key)
if task is None:
task = VimwikiTask(vim.current.buffer[key], key, self.tw, self)
self.cache[key] = task
if task.uuid:
self.uuid_cache[task.uuid] = task
return task
def __iter__(self):
iterated_cache = copy.copy(self.cache)
while iterated_cache.keys():
for key in list(iterated_cache.keys()):
task = iterated_cache[key]
if all([t.line_number not in iterated_cache.keys()
for t in task.add_dependencies]):
del iterated_cache[key]
yield task
def reset(self):
self.cache = dict()
# def update_tasks(self):
# tasks = [t
|
be1d11bcf53ecab1fbb0e69191c62c83492363d2
|
cmn_color_helper.py
|
cmn_color_helper.py
|
class ColorStr:
_HEADER = '\033[95m'
_OKBLUE = '\033[94m'
_OKGREEN = '\033[92m'
_WARNING = '\033[93m'
_FAIL = '\033[91m'
_ENDC = '\033[0m'
_BOLD = '\033[1m'
_UNDERLINE = '\033[4m'
@staticmethod
def color_fun_name(fun):
return ColorStr._UNDERLINE + ColorStr._BOLD + ColorStr._OKBLUE + fun + ColorStr._ENDC
@staticmethod
def color_pkg_name(pkg):
return ColorStr._HEADER + pkg + ColorStr._ENDC
|
class ColorStr:
_HEADER = '\033[95m'
_OKBLUE = '\033[94m'
_OKGREEN = '\033[92m'
_WARNING = '\033[93m'
_FAIL = '\033[91m'
_ENDC = '\033[0m'
_BOLD = '\033[1m'
_UNDERLINE = '\033[4m'
@staticmethod
def color_fun_name(fun):
return ColorStr._HEADER + ColorStr._BOLD + fun + ColorStr._ENDC
@staticmethod
def color_pkg_name(pkg):
return ColorStr._UNDERLINE + pkg + ColorStr._ENDC
|
Change function and pkg color style.
|
Change function and pkg color style.
|
Python
|
mit
|
fanchen1988/sc-common-helper
|
class ColorStr:
_HEADER = '\033[95m'
_OKBLUE = '\033[94m'
_OKGREEN = '\033[92m'
_WARNING = '\033[93m'
_FAIL = '\033[91m'
_ENDC = '\033[0m'
_BOLD = '\033[1m'
_UNDERLINE = '\033[4m'
@staticmethod
def color_fun_name(fun):
return ColorStr._UNDERLINE + ColorStr._BOLD + ColorStr._OKBLUE + fun + ColorStr._ENDC
@staticmethod
def color_pkg_name(pkg):
return ColorStr._HEADER + pkg + ColorStr._ENDC
Change function and pkg color style.
|
class ColorStr:
_HEADER = '\033[95m'
_OKBLUE = '\033[94m'
_OKGREEN = '\033[92m'
_WARNING = '\033[93m'
_FAIL = '\033[91m'
_ENDC = '\033[0m'
_BOLD = '\033[1m'
_UNDERLINE = '\033[4m'
@staticmethod
def color_fun_name(fun):
return ColorStr._HEADER + ColorStr._BOLD + fun + ColorStr._ENDC
@staticmethod
def color_pkg_name(pkg):
return ColorStr._UNDERLINE + pkg + ColorStr._ENDC
|
<commit_before>class ColorStr:
_HEADER = '\033[95m'
_OKBLUE = '\033[94m'
_OKGREEN = '\033[92m'
_WARNING = '\033[93m'
_FAIL = '\033[91m'
_ENDC = '\033[0m'
_BOLD = '\033[1m'
_UNDERLINE = '\033[4m'
@staticmethod
def color_fun_name(fun):
return ColorStr._UNDERLINE + ColorStr._BOLD + ColorStr._OKBLUE + fun + ColorStr._ENDC
@staticmethod
def color_pkg_name(pkg):
return ColorStr._HEADER + pkg + ColorStr._ENDC
<commit_msg>Change function and pkg color style.<commit_after>
|
class ColorStr:
_HEADER = '\033[95m'
_OKBLUE = '\033[94m'
_OKGREEN = '\033[92m'
_WARNING = '\033[93m'
_FAIL = '\033[91m'
_ENDC = '\033[0m'
_BOLD = '\033[1m'
_UNDERLINE = '\033[4m'
@staticmethod
def color_fun_name(fun):
return ColorStr._HEADER + ColorStr._BOLD + fun + ColorStr._ENDC
@staticmethod
def color_pkg_name(pkg):
return ColorStr._UNDERLINE + pkg + ColorStr._ENDC
|
class ColorStr:
_HEADER = '\033[95m'
_OKBLUE = '\033[94m'
_OKGREEN = '\033[92m'
_WARNING = '\033[93m'
_FAIL = '\033[91m'
_ENDC = '\033[0m'
_BOLD = '\033[1m'
_UNDERLINE = '\033[4m'
@staticmethod
def color_fun_name(fun):
return ColorStr._UNDERLINE + ColorStr._BOLD + ColorStr._OKBLUE + fun + ColorStr._ENDC
@staticmethod
def color_pkg_name(pkg):
return ColorStr._HEADER + pkg + ColorStr._ENDC
Change function and pkg color style.class ColorStr:
_HEADER = '\033[95m'
_OKBLUE = '\033[94m'
_OKGREEN = '\033[92m'
_WARNING = '\033[93m'
_FAIL = '\033[91m'
_ENDC = '\033[0m'
_BOLD = '\033[1m'
_UNDERLINE = '\033[4m'
@staticmethod
def color_fun_name(fun):
return ColorStr._HEADER + ColorStr._BOLD + fun + ColorStr._ENDC
@staticmethod
def color_pkg_name(pkg):
return ColorStr._UNDERLINE + pkg + ColorStr._ENDC
|
<commit_before>class ColorStr:
_HEADER = '\033[95m'
_OKBLUE = '\033[94m'
_OKGREEN = '\033[92m'
_WARNING = '\033[93m'
_FAIL = '\033[91m'
_ENDC = '\033[0m'
_BOLD = '\033[1m'
_UNDERLINE = '\033[4m'
@staticmethod
def color_fun_name(fun):
return ColorStr._UNDERLINE + ColorStr._BOLD + ColorStr._OKBLUE + fun + ColorStr._ENDC
@staticmethod
def color_pkg_name(pkg):
return ColorStr._HEADER + pkg + ColorStr._ENDC
<commit_msg>Change function and pkg color style.<commit_after>class ColorStr:
_HEADER = '\033[95m'
_OKBLUE = '\033[94m'
_OKGREEN = '\033[92m'
_WARNING = '\033[93m'
_FAIL = '\033[91m'
_ENDC = '\033[0m'
_BOLD = '\033[1m'
_UNDERLINE = '\033[4m'
@staticmethod
def color_fun_name(fun):
return ColorStr._HEADER + ColorStr._BOLD + fun + ColorStr._ENDC
@staticmethod
def color_pkg_name(pkg):
return ColorStr._UNDERLINE + pkg + ColorStr._ENDC
|
a175dbf2f239690cb5128698d5896233467e285e
|
huxley/settings/pipeline.py
|
huxley/settings/pipeline.py
|
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from os.path import join
from .roots import PROJECT_ROOT
PIPELINE_COMPILERS = (
'huxley.utils.pipeline.PySCSSCompiler',
'pipeline_browserify.compiler.BrowserifyCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor'
PIPELINE_JS_COMPRESSOR = None
PIPELINE_CSS = {
'huxley': {
'source_filenames': (
'css/*.css',
'scss/core/*.scss',
'scss/accounts/*.scss',
'scss/advisors/*.scss',
'scss/chairs/*.scss',
),
'output_filename': 'css/huxley.css'
},
}
PIPELINE_JS = {
'huxley': {
'source_filenames': (
'js/huxley.browserify.js',
),
'output_filename': 'js/huxley.js'
}
}
PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify')
PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
|
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from os.path import join
from .roots import PROJECT_ROOT
PIPELINE_COMPILERS = (
'huxley.utils.pipeline.PySCSSCompiler',
'pipeline_browserify.compiler.BrowserifyCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor'
PIPELINE_JS_COMPRESSOR = None
PIPELINE_CSS = {
'huxley': {
'source_filenames': (
'scss/core/*.scss',
'scss/accounts/*.scss',
),
'output_filename': 'css/huxley.css'
},
}
PIPELINE_JS = {
'huxley': {
'source_filenames': (
'js/huxley.browserify.js',
),
'output_filename': 'js/huxley.js'
}
}
PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify')
PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
|
Clean up file patterns in PIPELINE_CSS setting.
|
Clean up file patterns in PIPELINE_CSS setting.
|
Python
|
bsd-3-clause
|
ctmunwebmaster/huxley,ctmunwebmaster/huxley,nathanielparke/huxley,bmun/huxley,bmun/huxley,nathanielparke/huxley,nathanielparke/huxley,nathanielparke/huxley,ctmunwebmaster/huxley,bmun/huxley,ctmunwebmaster/huxley,bmun/huxley
|
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from os.path import join
from .roots import PROJECT_ROOT
PIPELINE_COMPILERS = (
'huxley.utils.pipeline.PySCSSCompiler',
'pipeline_browserify.compiler.BrowserifyCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor'
PIPELINE_JS_COMPRESSOR = None
PIPELINE_CSS = {
'huxley': {
'source_filenames': (
'css/*.css',
'scss/core/*.scss',
'scss/accounts/*.scss',
'scss/advisors/*.scss',
'scss/chairs/*.scss',
),
'output_filename': 'css/huxley.css'
},
}
PIPELINE_JS = {
'huxley': {
'source_filenames': (
'js/huxley.browserify.js',
),
'output_filename': 'js/huxley.js'
}
}
PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify')
PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
Clean up file patterns in PIPELINE_CSS setting.
|
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from os.path import join
from .roots import PROJECT_ROOT
PIPELINE_COMPILERS = (
'huxley.utils.pipeline.PySCSSCompiler',
'pipeline_browserify.compiler.BrowserifyCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor'
PIPELINE_JS_COMPRESSOR = None
PIPELINE_CSS = {
'huxley': {
'source_filenames': (
'scss/core/*.scss',
'scss/accounts/*.scss',
),
'output_filename': 'css/huxley.css'
},
}
PIPELINE_JS = {
'huxley': {
'source_filenames': (
'js/huxley.browserify.js',
),
'output_filename': 'js/huxley.js'
}
}
PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify')
PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
|
<commit_before># Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from os.path import join
from .roots import PROJECT_ROOT
PIPELINE_COMPILERS = (
'huxley.utils.pipeline.PySCSSCompiler',
'pipeline_browserify.compiler.BrowserifyCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor'
PIPELINE_JS_COMPRESSOR = None
PIPELINE_CSS = {
'huxley': {
'source_filenames': (
'css/*.css',
'scss/core/*.scss',
'scss/accounts/*.scss',
'scss/advisors/*.scss',
'scss/chairs/*.scss',
),
'output_filename': 'css/huxley.css'
},
}
PIPELINE_JS = {
'huxley': {
'source_filenames': (
'js/huxley.browserify.js',
),
'output_filename': 'js/huxley.js'
}
}
PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify')
PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
<commit_msg>Clean up file patterns in PIPELINE_CSS setting.<commit_after>
|
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from os.path import join
from .roots import PROJECT_ROOT
PIPELINE_COMPILERS = (
'huxley.utils.pipeline.PySCSSCompiler',
'pipeline_browserify.compiler.BrowserifyCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor'
PIPELINE_JS_COMPRESSOR = None
PIPELINE_CSS = {
'huxley': {
'source_filenames': (
'scss/core/*.scss',
'scss/accounts/*.scss',
),
'output_filename': 'css/huxley.css'
},
}
PIPELINE_JS = {
'huxley': {
'source_filenames': (
'js/huxley.browserify.js',
),
'output_filename': 'js/huxley.js'
}
}
PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify')
PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
|
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from os.path import join
from .roots import PROJECT_ROOT
PIPELINE_COMPILERS = (
'huxley.utils.pipeline.PySCSSCompiler',
'pipeline_browserify.compiler.BrowserifyCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor'
PIPELINE_JS_COMPRESSOR = None
PIPELINE_CSS = {
'huxley': {
'source_filenames': (
'css/*.css',
'scss/core/*.scss',
'scss/accounts/*.scss',
'scss/advisors/*.scss',
'scss/chairs/*.scss',
),
'output_filename': 'css/huxley.css'
},
}
PIPELINE_JS = {
'huxley': {
'source_filenames': (
'js/huxley.browserify.js',
),
'output_filename': 'js/huxley.js'
}
}
PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify')
PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
Clean up file patterns in PIPELINE_CSS setting.# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from os.path import join
from .roots import PROJECT_ROOT
PIPELINE_COMPILERS = (
'huxley.utils.pipeline.PySCSSCompiler',
'pipeline_browserify.compiler.BrowserifyCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor'
PIPELINE_JS_COMPRESSOR = None
PIPELINE_CSS = {
'huxley': {
'source_filenames': (
'scss/core/*.scss',
'scss/accounts/*.scss',
),
'output_filename': 'css/huxley.css'
},
}
PIPELINE_JS = {
'huxley': {
'source_filenames': (
'js/huxley.browserify.js',
),
'output_filename': 'js/huxley.js'
}
}
PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify')
PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
|
<commit_before># Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from os.path import join
from .roots import PROJECT_ROOT
PIPELINE_COMPILERS = (
'huxley.utils.pipeline.PySCSSCompiler',
'pipeline_browserify.compiler.BrowserifyCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor'
PIPELINE_JS_COMPRESSOR = None
PIPELINE_CSS = {
'huxley': {
'source_filenames': (
'css/*.css',
'scss/core/*.scss',
'scss/accounts/*.scss',
'scss/advisors/*.scss',
'scss/chairs/*.scss',
),
'output_filename': 'css/huxley.css'
},
}
PIPELINE_JS = {
'huxley': {
'source_filenames': (
'js/huxley.browserify.js',
),
'output_filename': 'js/huxley.js'
}
}
PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify')
PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
<commit_msg>Clean up file patterns in PIPELINE_CSS setting.<commit_after># Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
from os.path import join
from .roots import PROJECT_ROOT
PIPELINE_COMPILERS = (
'huxley.utils.pipeline.PySCSSCompiler',
'pipeline_browserify.compiler.BrowserifyCompiler',
)
PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.cssmin.CSSMinCompressor'
PIPELINE_JS_COMPRESSOR = None
PIPELINE_CSS = {
'huxley': {
'source_filenames': (
'scss/core/*.scss',
'scss/accounts/*.scss',
),
'output_filename': 'css/huxley.css'
},
}
PIPELINE_JS = {
'huxley': {
'source_filenames': (
'js/huxley.browserify.js',
),
'output_filename': 'js/huxley.js'
}
}
PIPELINE_BROWSERIFY_BINARY = join(PROJECT_ROOT, 'node_modules/.bin/browserify')
PIPELINE_BROWSERIFY_ARGUMENTS = '-t reactify'
|
85ff7e048a1e9c913adb7749cfed0aa903366197
|
data/load_data.py
|
data/load_data.py
|
import csv
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint
def main(path):
with open(path, "r") as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
for row in reader:
if row == []:
continue
try:
band_gap = row[10]
if band_gap == '---':
band_gap = None
options = row[4]
try:
exact_name = get_exact_name(row[1])
try:
decay_feature = get_decay_feature_vector(exact_name)
except:
decay_feature = None
except:
exact_name = None
decay_feature = None
point = DataPoint(
name=row[1], options=row[4],
homo=row[5], lumo=row[6],
homo_orbital=row[7], dipole=row[8],
energy=row[9], band_gap=band_gap,
exact_name=exact_name,
decay_feature=decay_feature)
point.clean_fields()
points.append(point)
except Exception as e:
pass
DataPoint.objects.bulk_create(points)
|
import csv
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint
def main(path):
with open(path, "r") as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
count = 0
for row in reader:
if row == []:
continue
try:
band_gap = row[10]
if band_gap == '---':
band_gap = None
options = row[4]
try:
exact_name = get_exact_name(row[1])
try:
decay_feature = get_decay_feature_vector(exact_name)
except:
decay_feature = None
except:
exact_name = None
decay_feature = None
point = DataPoint(
name=row[1], options=row[4],
homo=row[5], lumo=row[6],
homo_orbital=row[7], dipole=row[8],
energy=row[9], band_gap=band_gap,
exact_name=exact_name,
decay_feature=decay_feature)
point.clean_fields()
points.append(point)
count += 1
except Exception as e:
pass
DataPoint.objects.bulk_create(points)
print "Added %d datapoints." % count
|
Add indicator for how many datapoints have been loaded
|
Add indicator for how many datapoints have been loaded
|
Python
|
mit
|
crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp
|
import csv
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint
def main(path):
with open(path, "r") as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
for row in reader:
if row == []:
continue
try:
band_gap = row[10]
if band_gap == '---':
band_gap = None
options = row[4]
try:
exact_name = get_exact_name(row[1])
try:
decay_feature = get_decay_feature_vector(exact_name)
except:
decay_feature = None
except:
exact_name = None
decay_feature = None
point = DataPoint(
name=row[1], options=row[4],
homo=row[5], lumo=row[6],
homo_orbital=row[7], dipole=row[8],
energy=row[9], band_gap=band_gap,
exact_name=exact_name,
decay_feature=decay_feature)
point.clean_fields()
points.append(point)
except Exception as e:
pass
DataPoint.objects.bulk_create(points)
Add indicator for how many datapoints have been loaded
|
import csv
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint
def main(path):
with open(path, "r") as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
count = 0
for row in reader:
if row == []:
continue
try:
band_gap = row[10]
if band_gap == '---':
band_gap = None
options = row[4]
try:
exact_name = get_exact_name(row[1])
try:
decay_feature = get_decay_feature_vector(exact_name)
except:
decay_feature = None
except:
exact_name = None
decay_feature = None
point = DataPoint(
name=row[1], options=row[4],
homo=row[5], lumo=row[6],
homo_orbital=row[7], dipole=row[8],
energy=row[9], band_gap=band_gap,
exact_name=exact_name,
decay_feature=decay_feature)
point.clean_fields()
points.append(point)
count += 1
except Exception as e:
pass
DataPoint.objects.bulk_create(points)
print "Added %d datapoints." % count
|
<commit_before>import csv
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint
def main(path):
with open(path, "r") as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
for row in reader:
if row == []:
continue
try:
band_gap = row[10]
if band_gap == '---':
band_gap = None
options = row[4]
try:
exact_name = get_exact_name(row[1])
try:
decay_feature = get_decay_feature_vector(exact_name)
except:
decay_feature = None
except:
exact_name = None
decay_feature = None
point = DataPoint(
name=row[1], options=row[4],
homo=row[5], lumo=row[6],
homo_orbital=row[7], dipole=row[8],
energy=row[9], band_gap=band_gap,
exact_name=exact_name,
decay_feature=decay_feature)
point.clean_fields()
points.append(point)
except Exception as e:
pass
DataPoint.objects.bulk_create(points)
<commit_msg>Add indicator for how many datapoints have been loaded<commit_after>
|
import csv
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint
def main(path):
with open(path, "r") as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
count = 0
for row in reader:
if row == []:
continue
try:
band_gap = row[10]
if band_gap == '---':
band_gap = None
options = row[4]
try:
exact_name = get_exact_name(row[1])
try:
decay_feature = get_decay_feature_vector(exact_name)
except:
decay_feature = None
except:
exact_name = None
decay_feature = None
point = DataPoint(
name=row[1], options=row[4],
homo=row[5], lumo=row[6],
homo_orbital=row[7], dipole=row[8],
energy=row[9], band_gap=band_gap,
exact_name=exact_name,
decay_feature=decay_feature)
point.clean_fields()
points.append(point)
count += 1
except Exception as e:
pass
DataPoint.objects.bulk_create(points)
print "Added %d datapoints." % count
|
import csv
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint
def main(path):
with open(path, "r") as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
for row in reader:
if row == []:
continue
try:
band_gap = row[10]
if band_gap == '---':
band_gap = None
options = row[4]
try:
exact_name = get_exact_name(row[1])
try:
decay_feature = get_decay_feature_vector(exact_name)
except:
decay_feature = None
except:
exact_name = None
decay_feature = None
point = DataPoint(
name=row[1], options=row[4],
homo=row[5], lumo=row[6],
homo_orbital=row[7], dipole=row[8],
energy=row[9], band_gap=band_gap,
exact_name=exact_name,
decay_feature=decay_feature)
point.clean_fields()
points.append(point)
except Exception as e:
pass
DataPoint.objects.bulk_create(points)
Add indicator for how many datapoints have been loadedimport csv
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint
def main(path):
with open(path, "r") as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
count = 0
for row in reader:
if row == []:
continue
try:
band_gap = row[10]
if band_gap == '---':
band_gap = None
options = row[4]
try:
exact_name = get_exact_name(row[1])
try:
decay_feature = get_decay_feature_vector(exact_name)
except:
decay_feature = None
except:
exact_name = None
decay_feature = None
point = DataPoint(
name=row[1], options=row[4],
homo=row[5], lumo=row[6],
homo_orbital=row[7], dipole=row[8],
energy=row[9], band_gap=band_gap,
exact_name=exact_name,
decay_feature=decay_feature)
point.clean_fields()
points.append(point)
count += 1
except Exception as e:
pass
DataPoint.objects.bulk_create(points)
print "Added %d datapoints." % count
|
<commit_before>import csv
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint
def main(path):
with open(path, "r") as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
for row in reader:
if row == []:
continue
try:
band_gap = row[10]
if band_gap == '---':
band_gap = None
options = row[4]
try:
exact_name = get_exact_name(row[1])
try:
decay_feature = get_decay_feature_vector(exact_name)
except:
decay_feature = None
except:
exact_name = None
decay_feature = None
point = DataPoint(
name=row[1], options=row[4],
homo=row[5], lumo=row[6],
homo_orbital=row[7], dipole=row[8],
energy=row[9], band_gap=band_gap,
exact_name=exact_name,
decay_feature=decay_feature)
point.clean_fields()
points.append(point)
except Exception as e:
pass
DataPoint.objects.bulk_create(points)
<commit_msg>Add indicator for how many datapoints have been loaded<commit_after>import csv
from chemtools.ml import get_decay_feature_vector
from chemtools.mol_name import get_exact_name
from models import DataPoint
def main(path):
with open(path, "r") as csvfile:
reader = csv.reader(csvfile, delimiter=',', quotechar='"')
points = []
count = 0
for row in reader:
if row == []:
continue
try:
band_gap = row[10]
if band_gap == '---':
band_gap = None
options = row[4]
try:
exact_name = get_exact_name(row[1])
try:
decay_feature = get_decay_feature_vector(exact_name)
except:
decay_feature = None
except:
exact_name = None
decay_feature = None
point = DataPoint(
name=row[1], options=row[4],
homo=row[5], lumo=row[6],
homo_orbital=row[7], dipole=row[8],
energy=row[9], band_gap=band_gap,
exact_name=exact_name,
decay_feature=decay_feature)
point.clean_fields()
points.append(point)
count += 1
except Exception as e:
pass
DataPoint.objects.bulk_create(points)
print "Added %d datapoints." % count
|
7fff4438510c30e592db858b2d519eddb5837a6c
|
geomet/__init__.py
|
geomet/__init__.py
|
# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.3.0'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
|
# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.0.0'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
|
Increment lib version to 1.0.0
|
Increment lib version to 1.0.0
|
Python
|
apache-2.0
|
geomet/geomet,geomet/geomet
|
# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.3.0'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
Increment lib version to 1.0.0
|
# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.0.0'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
|
<commit_before># Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.3.0'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
<commit_msg>Increment lib version to 1.0.0<commit_after>
|
# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.0.0'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
|
# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.3.0'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
Increment lib version to 1.0.0# Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.0.0'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
|
<commit_before># Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '0.3.0'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
<commit_msg>Increment lib version to 1.0.0<commit_after># Copyright 2013 Lars Butler & individual contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.0.0'
class InvalidGeoJSONException(Exception):
"""
Simple exception class to indicate if invalid GeoJSON is encountered.
"""
|
3af4434a40724d52cdb29d823041b0d44be4b753
|
purkinje/purkinje.py
|
purkinje/purkinje.py
|
#!/usr/bin/env python
"""Main module"""
from __future__ import print_function
from __future__ import absolute_import
import gevent
import gevent.monkey
gevent.monkey.patch_all()
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
import werkzeug.serving
# from werkzeug.debug import DebuggedApplication
# TODO .app gives error about relative import
from .app import get_app, send_bulk
APP_PORT = 5000
DEBUG = True
#
def main():
"""Starts web application
"""
@werkzeug.serving.run_with_reloader
def go():
app = get_app()
app.debug = DEBUG
# TODO: asset debug settings will cause bad YSLOW rating
app.config['COMPRESS_DEBUG'] = False
app.config['ASSETS_DEBUG'] = DEBUG
# Breaks web socket communication
# (WebSocketConnectionClosedException in client)
# app = DebuggedApplication(app, evalex=True)
http_server = WSGIServer(('', APP_PORT),
app,
handler_class=WebSocketHandler)
# gevent.spawn(send_dummy_notifications)
gevent.spawn(send_bulk)
http_server.serve_forever()
# app.run()
if __name__ == '__main__':
main = werkzeug.serving.run_with_reloader(main)
print('purkinje ready')
main()
|
#!/usr/bin/env python
"""Main module"""
from __future__ import print_function
from __future__ import absolute_import
import gevent
import gevent.monkey
gevent.monkey.patch_all()
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
import werkzeug.serving
# from werkzeug.debug import DebuggedApplication
# TODO .app gives error about relative import
from .app import get_app, send_bulk
APP_PORT = 5000
DEBUG = True
#
def main():
"""Starts web application
"""
@werkzeug.serving.run_with_reloader
def go():
app = get_app()
app.debug = DEBUG
if app.debug:
app.config.update(SEND_FILE_MAX_AGE_DEFAULT=0)
# TODO: asset debug settings will cause bad YSLOW rating
app.config['COMPRESS_DEBUG'] = False
app.config['ASSETS_DEBUG'] = DEBUG
# Breaks web socket communication
# (WebSocketConnectionClosedException in client)
# app = DebuggedApplication(app, evalex=True)
http_server = WSGIServer(('', APP_PORT),
app,
handler_class=WebSocketHandler)
# gevent.spawn(send_dummy_notifications)
gevent.spawn(send_bulk)
http_server.serve_forever()
# app.run()
if __name__ == '__main__':
main = werkzeug.serving.run_with_reloader(main)
print('purkinje ready')
main()
|
Disable Flask 12h caching in debug mode - prevented refreshing of Angular directive templates
|
Disable Flask 12h caching in debug mode
- prevented refreshing of Angular directive templates
|
Python
|
mit
|
bbiskup/purkinje,bbiskup/purkinje,bbiskup/purkinje,bbiskup/purkinje
|
#!/usr/bin/env python
"""Main module"""
from __future__ import print_function
from __future__ import absolute_import
import gevent
import gevent.monkey
gevent.monkey.patch_all()
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
import werkzeug.serving
# from werkzeug.debug import DebuggedApplication
# TODO .app gives error about relative import
from .app import get_app, send_bulk
APP_PORT = 5000
DEBUG = True
#
def main():
"""Starts web application
"""
@werkzeug.serving.run_with_reloader
def go():
app = get_app()
app.debug = DEBUG
# TODO: asset debug settings will cause bad YSLOW rating
app.config['COMPRESS_DEBUG'] = False
app.config['ASSETS_DEBUG'] = DEBUG
# Breaks web socket communication
# (WebSocketConnectionClosedException in client)
# app = DebuggedApplication(app, evalex=True)
http_server = WSGIServer(('', APP_PORT),
app,
handler_class=WebSocketHandler)
# gevent.spawn(send_dummy_notifications)
gevent.spawn(send_bulk)
http_server.serve_forever()
# app.run()
if __name__ == '__main__':
main = werkzeug.serving.run_with_reloader(main)
print('purkinje ready')
main()
Disable Flask 12h caching in debug mode
- prevented refreshing of Angular directive templates
|
#!/usr/bin/env python
"""Main module"""
from __future__ import print_function
from __future__ import absolute_import
import gevent
import gevent.monkey
gevent.monkey.patch_all()
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
import werkzeug.serving
# from werkzeug.debug import DebuggedApplication
# TODO .app gives error about relative import
from .app import get_app, send_bulk
APP_PORT = 5000
DEBUG = True
#
def main():
"""Starts web application
"""
@werkzeug.serving.run_with_reloader
def go():
app = get_app()
app.debug = DEBUG
if app.debug:
app.config.update(SEND_FILE_MAX_AGE_DEFAULT=0)
# TODO: asset debug settings will cause bad YSLOW rating
app.config['COMPRESS_DEBUG'] = False
app.config['ASSETS_DEBUG'] = DEBUG
# Breaks web socket communication
# (WebSocketConnectionClosedException in client)
# app = DebuggedApplication(app, evalex=True)
http_server = WSGIServer(('', APP_PORT),
app,
handler_class=WebSocketHandler)
# gevent.spawn(send_dummy_notifications)
gevent.spawn(send_bulk)
http_server.serve_forever()
# app.run()
if __name__ == '__main__':
main = werkzeug.serving.run_with_reloader(main)
print('purkinje ready')
main()
|
<commit_before>#!/usr/bin/env python
"""Main module"""
from __future__ import print_function
from __future__ import absolute_import
import gevent
import gevent.monkey
gevent.monkey.patch_all()
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
import werkzeug.serving
# from werkzeug.debug import DebuggedApplication
# TODO .app gives error about relative import
from .app import get_app, send_bulk
APP_PORT = 5000
DEBUG = True
#
def main():
"""Starts web application
"""
@werkzeug.serving.run_with_reloader
def go():
app = get_app()
app.debug = DEBUG
# TODO: asset debug settings will cause bad YSLOW rating
app.config['COMPRESS_DEBUG'] = False
app.config['ASSETS_DEBUG'] = DEBUG
# Breaks web socket communication
# (WebSocketConnectionClosedException in client)
# app = DebuggedApplication(app, evalex=True)
http_server = WSGIServer(('', APP_PORT),
app,
handler_class=WebSocketHandler)
# gevent.spawn(send_dummy_notifications)
gevent.spawn(send_bulk)
http_server.serve_forever()
# app.run()
if __name__ == '__main__':
main = werkzeug.serving.run_with_reloader(main)
print('purkinje ready')
main()
<commit_msg>Disable Flask 12h caching in debug mode
- prevented refreshing of Angular directive templates<commit_after>
|
#!/usr/bin/env python
"""Main module"""
from __future__ import print_function
from __future__ import absolute_import
import gevent
import gevent.monkey
gevent.monkey.patch_all()
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
import werkzeug.serving
# from werkzeug.debug import DebuggedApplication
# TODO .app gives error about relative import
from .app import get_app, send_bulk
APP_PORT = 5000
DEBUG = True
#
def main():
"""Starts web application
"""
@werkzeug.serving.run_with_reloader
def go():
app = get_app()
app.debug = DEBUG
if app.debug:
app.config.update(SEND_FILE_MAX_AGE_DEFAULT=0)
# TODO: asset debug settings will cause bad YSLOW rating
app.config['COMPRESS_DEBUG'] = False
app.config['ASSETS_DEBUG'] = DEBUG
# Breaks web socket communication
# (WebSocketConnectionClosedException in client)
# app = DebuggedApplication(app, evalex=True)
http_server = WSGIServer(('', APP_PORT),
app,
handler_class=WebSocketHandler)
# gevent.spawn(send_dummy_notifications)
gevent.spawn(send_bulk)
http_server.serve_forever()
# app.run()
if __name__ == '__main__':
main = werkzeug.serving.run_with_reloader(main)
print('purkinje ready')
main()
|
#!/usr/bin/env python
"""Main module"""
from __future__ import print_function
from __future__ import absolute_import
import gevent
import gevent.monkey
gevent.monkey.patch_all()
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
import werkzeug.serving
# from werkzeug.debug import DebuggedApplication
# TODO .app gives error about relative import
from .app import get_app, send_bulk
APP_PORT = 5000
DEBUG = True
#
def main():
"""Starts web application
"""
@werkzeug.serving.run_with_reloader
def go():
app = get_app()
app.debug = DEBUG
# TODO: asset debug settings will cause bad YSLOW rating
app.config['COMPRESS_DEBUG'] = False
app.config['ASSETS_DEBUG'] = DEBUG
# Breaks web socket communication
# (WebSocketConnectionClosedException in client)
# app = DebuggedApplication(app, evalex=True)
http_server = WSGIServer(('', APP_PORT),
app,
handler_class=WebSocketHandler)
# gevent.spawn(send_dummy_notifications)
gevent.spawn(send_bulk)
http_server.serve_forever()
# app.run()
if __name__ == '__main__':
main = werkzeug.serving.run_with_reloader(main)
print('purkinje ready')
main()
Disable Flask 12h caching in debug mode
- prevented refreshing of Angular directive templates#!/usr/bin/env python
"""Main module"""
from __future__ import print_function
from __future__ import absolute_import
import gevent
import gevent.monkey
gevent.monkey.patch_all()
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
import werkzeug.serving
# from werkzeug.debug import DebuggedApplication
# TODO .app gives error about relative import
from .app import get_app, send_bulk
APP_PORT = 5000
DEBUG = True
#
def main():
"""Starts web application
"""
@werkzeug.serving.run_with_reloader
def go():
app = get_app()
app.debug = DEBUG
if app.debug:
app.config.update(SEND_FILE_MAX_AGE_DEFAULT=0)
# TODO: asset debug settings will cause bad YSLOW rating
app.config['COMPRESS_DEBUG'] = False
app.config['ASSETS_DEBUG'] = DEBUG
# Breaks web socket communication
# (WebSocketConnectionClosedException in client)
# app = DebuggedApplication(app, evalex=True)
http_server = WSGIServer(('', APP_PORT),
app,
handler_class=WebSocketHandler)
# gevent.spawn(send_dummy_notifications)
gevent.spawn(send_bulk)
http_server.serve_forever()
# app.run()
if __name__ == '__main__':
main = werkzeug.serving.run_with_reloader(main)
print('purkinje ready')
main()
|
<commit_before>#!/usr/bin/env python
"""Main module"""
from __future__ import print_function
from __future__ import absolute_import
import gevent
import gevent.monkey
gevent.monkey.patch_all()
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
import werkzeug.serving
# from werkzeug.debug import DebuggedApplication
# TODO .app gives error about relative import
from .app import get_app, send_bulk
APP_PORT = 5000
DEBUG = True
#
def main():
"""Starts web application
"""
@werkzeug.serving.run_with_reloader
def go():
app = get_app()
app.debug = DEBUG
# TODO: asset debug settings will cause bad YSLOW rating
app.config['COMPRESS_DEBUG'] = False
app.config['ASSETS_DEBUG'] = DEBUG
# Breaks web socket communication
# (WebSocketConnectionClosedException in client)
# app = DebuggedApplication(app, evalex=True)
http_server = WSGIServer(('', APP_PORT),
app,
handler_class=WebSocketHandler)
# gevent.spawn(send_dummy_notifications)
gevent.spawn(send_bulk)
http_server.serve_forever()
# app.run()
if __name__ == '__main__':
main = werkzeug.serving.run_with_reloader(main)
print('purkinje ready')
main()
<commit_msg>Disable Flask 12h caching in debug mode
- prevented refreshing of Angular directive templates<commit_after>#!/usr/bin/env python
"""Main module"""
from __future__ import print_function
from __future__ import absolute_import
import gevent
import gevent.monkey
gevent.monkey.patch_all()
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
import werkzeug.serving
# from werkzeug.debug import DebuggedApplication
# TODO .app gives error about relative import
from .app import get_app, send_bulk
APP_PORT = 5000
DEBUG = True
#
def main():
"""Starts web application
"""
@werkzeug.serving.run_with_reloader
def go():
app = get_app()
app.debug = DEBUG
if app.debug:
app.config.update(SEND_FILE_MAX_AGE_DEFAULT=0)
# TODO: asset debug settings will cause bad YSLOW rating
app.config['COMPRESS_DEBUG'] = False
app.config['ASSETS_DEBUG'] = DEBUG
# Breaks web socket communication
# (WebSocketConnectionClosedException in client)
# app = DebuggedApplication(app, evalex=True)
http_server = WSGIServer(('', APP_PORT),
app,
handler_class=WebSocketHandler)
# gevent.spawn(send_dummy_notifications)
gevent.spawn(send_bulk)
http_server.serve_forever()
# app.run()
if __name__ == '__main__':
main = werkzeug.serving.run_with_reloader(main)
print('purkinje ready')
main()
|
dd69f35b623fa93579930e03c3aea8fc8f290136
|
lc0001_two_sum.py
|
lc0001_two_sum.py
|
"""Leetcode 1. Two Sum
Easy
URL: https://leetcode.com/problems/two-sum/description/
Given an array of integers, return indices of the two numbers such that
they add up to a specific target.
You may assume that each input would have exactly one solution,
and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
num_idx_d = {}
for i, num in enumerate(nums):
if target - num in num_idx_d:
return [num_idx_d[target - num], i]
num_idx_d[num] = i
return []
def main():
print Solution().twoSum([2, 7, 11, 15], 9)
if __name__ == '__main__':
main()
|
"""Leetcode 1. Two Sum
Easy
URL: https://leetcode.com/problems/two-sum/description/
Given an array of integers, return indices of the two numbers such that
they add up to a specific target.
You may assume that each input would have exactly one solution,
and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
num_idx_d = {}
for i, n in enumerate(nums):
if target - n in num_idx_d:
return [num_idx_d[target - n], i]
num_idx_d[n] = i
return []
def main():
print Solution().twoSum([2, 7, 11, 15], 9)
if __name__ == '__main__':
main()
|
Revise to var n & add space line
|
Revise to var n & add space line
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
"""Leetcode 1. Two Sum
Easy
URL: https://leetcode.com/problems/two-sum/description/
Given an array of integers, return indices of the two numbers such that
they add up to a specific target.
You may assume that each input would have exactly one solution,
and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
num_idx_d = {}
for i, num in enumerate(nums):
if target - num in num_idx_d:
return [num_idx_d[target - num], i]
num_idx_d[num] = i
return []
def main():
print Solution().twoSum([2, 7, 11, 15], 9)
if __name__ == '__main__':
main()
Revise to var n & add space line
|
"""Leetcode 1. Two Sum
Easy
URL: https://leetcode.com/problems/two-sum/description/
Given an array of integers, return indices of the two numbers such that
they add up to a specific target.
You may assume that each input would have exactly one solution,
and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
num_idx_d = {}
for i, n in enumerate(nums):
if target - n in num_idx_d:
return [num_idx_d[target - n], i]
num_idx_d[n] = i
return []
def main():
print Solution().twoSum([2, 7, 11, 15], 9)
if __name__ == '__main__':
main()
|
<commit_before>"""Leetcode 1. Two Sum
Easy
URL: https://leetcode.com/problems/two-sum/description/
Given an array of integers, return indices of the two numbers such that
they add up to a specific target.
You may assume that each input would have exactly one solution,
and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
num_idx_d = {}
for i, num in enumerate(nums):
if target - num in num_idx_d:
return [num_idx_d[target - num], i]
num_idx_d[num] = i
return []
def main():
print Solution().twoSum([2, 7, 11, 15], 9)
if __name__ == '__main__':
main()
<commit_msg>Revise to var n & add space line<commit_after>
|
"""Leetcode 1. Two Sum
Easy
URL: https://leetcode.com/problems/two-sum/description/
Given an array of integers, return indices of the two numbers such that
they add up to a specific target.
You may assume that each input would have exactly one solution,
and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
num_idx_d = {}
for i, n in enumerate(nums):
if target - n in num_idx_d:
return [num_idx_d[target - n], i]
num_idx_d[n] = i
return []
def main():
print Solution().twoSum([2, 7, 11, 15], 9)
if __name__ == '__main__':
main()
|
"""Leetcode 1. Two Sum
Easy
URL: https://leetcode.com/problems/two-sum/description/
Given an array of integers, return indices of the two numbers such that
they add up to a specific target.
You may assume that each input would have exactly one solution,
and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
num_idx_d = {}
for i, num in enumerate(nums):
if target - num in num_idx_d:
return [num_idx_d[target - num], i]
num_idx_d[num] = i
return []
def main():
print Solution().twoSum([2, 7, 11, 15], 9)
if __name__ == '__main__':
main()
Revise to var n & add space line"""Leetcode 1. Two Sum
Easy
URL: https://leetcode.com/problems/two-sum/description/
Given an array of integers, return indices of the two numbers such that
they add up to a specific target.
You may assume that each input would have exactly one solution,
and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
num_idx_d = {}
for i, n in enumerate(nums):
if target - n in num_idx_d:
return [num_idx_d[target - n], i]
num_idx_d[n] = i
return []
def main():
print Solution().twoSum([2, 7, 11, 15], 9)
if __name__ == '__main__':
main()
|
<commit_before>"""Leetcode 1. Two Sum
Easy
URL: https://leetcode.com/problems/two-sum/description/
Given an array of integers, return indices of the two numbers such that
they add up to a specific target.
You may assume that each input would have exactly one solution,
and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
num_idx_d = {}
for i, num in enumerate(nums):
if target - num in num_idx_d:
return [num_idx_d[target - num], i]
num_idx_d[num] = i
return []
def main():
print Solution().twoSum([2, 7, 11, 15], 9)
if __name__ == '__main__':
main()
<commit_msg>Revise to var n & add space line<commit_after>"""Leetcode 1. Two Sum
Easy
URL: https://leetcode.com/problems/two-sum/description/
Given an array of integers, return indices of the two numbers such that
they add up to a specific target.
You may assume that each input would have exactly one solution,
and you may not use the same element twice.
Example:
Given nums = [2, 7, 11, 15], target = 9,
Because nums[0] + nums[1] = 2 + 7 = 9,
return [0, 1].
"""
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(n).
Space complexity: O(n).
"""
num_idx_d = {}
for i, n in enumerate(nums):
if target - n in num_idx_d:
return [num_idx_d[target - n], i]
num_idx_d[n] = i
return []
def main():
print Solution().twoSum([2, 7, 11, 15], 9)
if __name__ == '__main__':
main()
|
bbc65d55d247d290a427ac5ba2c43b9d0033654d
|
WeatherServer/weather/views.py
|
WeatherServer/weather/views.py
|
import IP
from flask import Blueprint, request, render_template, jsonify
weather = Blueprint('weather', __name__, url_prefix='/weather')
@weather.route('/', methods=['GET'])
def index():
ip = request.remote_addr
location = IP.find(ip)
return jsonify(location=location, ip=ip)
|
import IP
from flask import Blueprint, request, render_template, jsonify
weather = Blueprint('weather', __name__, url_prefix='/weather')
@weather.route('/', methods=['GET'])
def index():
if request.headers.getlist("X-Forwarded-For"):
ip = request.headers.getlist("X-Forwarded-For")[0]
else:
ip = request.remotw_addr
location = IP.find(ip)
return jsonify(location=location, ip=ip)
|
Fix user's real ip address.
|
Fix user's real ip address.
|
Python
|
mit
|
keysona/WeatherServer,keysona/WeatherServer,keysona/WeatherServer,keysona/WeatherServer
|
import IP
from flask import Blueprint, request, render_template, jsonify
weather = Blueprint('weather', __name__, url_prefix='/weather')
@weather.route('/', methods=['GET'])
def index():
ip = request.remote_addr
location = IP.find(ip)
return jsonify(location=location, ip=ip)
Fix user's real ip address.
|
import IP
from flask import Blueprint, request, render_template, jsonify
weather = Blueprint('weather', __name__, url_prefix='/weather')
@weather.route('/', methods=['GET'])
def index():
if request.headers.getlist("X-Forwarded-For"):
ip = request.headers.getlist("X-Forwarded-For")[0]
else:
ip = request.remotw_addr
location = IP.find(ip)
return jsonify(location=location, ip=ip)
|
<commit_before>import IP
from flask import Blueprint, request, render_template, jsonify
weather = Blueprint('weather', __name__, url_prefix='/weather')
@weather.route('/', methods=['GET'])
def index():
ip = request.remote_addr
location = IP.find(ip)
return jsonify(location=location, ip=ip)
<commit_msg>Fix user's real ip address.<commit_after>
|
import IP
from flask import Blueprint, request, render_template, jsonify
weather = Blueprint('weather', __name__, url_prefix='/weather')
@weather.route('/', methods=['GET'])
def index():
if request.headers.getlist("X-Forwarded-For"):
ip = request.headers.getlist("X-Forwarded-For")[0]
else:
ip = request.remotw_addr
location = IP.find(ip)
return jsonify(location=location, ip=ip)
|
import IP
from flask import Blueprint, request, render_template, jsonify
weather = Blueprint('weather', __name__, url_prefix='/weather')
@weather.route('/', methods=['GET'])
def index():
ip = request.remote_addr
location = IP.find(ip)
return jsonify(location=location, ip=ip)
Fix user's real ip address.import IP
from flask import Blueprint, request, render_template, jsonify
weather = Blueprint('weather', __name__, url_prefix='/weather')
@weather.route('/', methods=['GET'])
def index():
if request.headers.getlist("X-Forwarded-For"):
ip = request.headers.getlist("X-Forwarded-For")[0]
else:
ip = request.remotw_addr
location = IP.find(ip)
return jsonify(location=location, ip=ip)
|
<commit_before>import IP
from flask import Blueprint, request, render_template, jsonify
weather = Blueprint('weather', __name__, url_prefix='/weather')
@weather.route('/', methods=['GET'])
def index():
ip = request.remote_addr
location = IP.find(ip)
return jsonify(location=location, ip=ip)
<commit_msg>Fix user's real ip address.<commit_after>import IP
from flask import Blueprint, request, render_template, jsonify
weather = Blueprint('weather', __name__, url_prefix='/weather')
@weather.route('/', methods=['GET'])
def index():
if request.headers.getlist("X-Forwarded-For"):
ip = request.headers.getlist("X-Forwarded-For")[0]
else:
ip = request.remotw_addr
location = IP.find(ip)
return jsonify(location=location, ip=ip)
|
4d30c1b3b475debefc2b6b0226e1fc62eb1f1bfa
|
wagtailmenus/migrations/0011_auto_20160415_1519.py
|
wagtailmenus/migrations/0011_auto_20160415_1519.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0010_auto_20160201_1558'),
]
operations = [
migrations.AlterField(
model_name='flatmenuitem',
name='url_append',
field=models.CharField(help_text=b"Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
migrations.AlterField(
model_name='mainmenuitem',
name='url_append',
field=models.CharField(help_text=b"Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0010_auto_20160201_1558'),
]
operations = [
migrations.AlterField(
model_name='flatmenuitem',
name='url_append',
field=models.CharField(help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
migrations.AlterField(
model_name='mainmenuitem',
name='url_append',
field=models.CharField(help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
]
|
Replace binary strings with normal strings in an older migration
|
Replace binary strings with normal strings in an older migration
|
Python
|
mit
|
ababic/wagtailmenus,rkhleics/wagtailmenus,ababic/wagtailmenus,rkhleics/wagtailmenus,ababic/wagtailmenus,rkhleics/wagtailmenus
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0010_auto_20160201_1558'),
]
operations = [
migrations.AlterField(
model_name='flatmenuitem',
name='url_append',
field=models.CharField(help_text=b"Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
migrations.AlterField(
model_name='mainmenuitem',
name='url_append',
field=models.CharField(help_text=b"Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
]
Replace binary strings with normal strings in an older migration
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0010_auto_20160201_1558'),
]
operations = [
migrations.AlterField(
model_name='flatmenuitem',
name='url_append',
field=models.CharField(help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
migrations.AlterField(
model_name='mainmenuitem',
name='url_append',
field=models.CharField(help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0010_auto_20160201_1558'),
]
operations = [
migrations.AlterField(
model_name='flatmenuitem',
name='url_append',
field=models.CharField(help_text=b"Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
migrations.AlterField(
model_name='mainmenuitem',
name='url_append',
field=models.CharField(help_text=b"Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
]
<commit_msg>Replace binary strings with normal strings in an older migration<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0010_auto_20160201_1558'),
]
operations = [
migrations.AlterField(
model_name='flatmenuitem',
name='url_append',
field=models.CharField(help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
migrations.AlterField(
model_name='mainmenuitem',
name='url_append',
field=models.CharField(help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0010_auto_20160201_1558'),
]
operations = [
migrations.AlterField(
model_name='flatmenuitem',
name='url_append',
field=models.CharField(help_text=b"Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
migrations.AlterField(
model_name='mainmenuitem',
name='url_append',
field=models.CharField(help_text=b"Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
]
Replace binary strings with normal strings in an older migration# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0010_auto_20160201_1558'),
]
operations = [
migrations.AlterField(
model_name='flatmenuitem',
name='url_append',
field=models.CharField(help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
migrations.AlterField(
model_name='mainmenuitem',
name='url_append',
field=models.CharField(help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0010_auto_20160201_1558'),
]
operations = [
migrations.AlterField(
model_name='flatmenuitem',
name='url_append',
field=models.CharField(help_text=b"Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
migrations.AlterField(
model_name='mainmenuitem',
name='url_append',
field=models.CharField(help_text=b"Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
]
<commit_msg>Replace binary strings with normal strings in an older migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wagtailmenus', '0010_auto_20160201_1558'),
]
operations = [
migrations.AlterField(
model_name='flatmenuitem',
name='url_append',
field=models.CharField(help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
migrations.AlterField(
model_name='mainmenuitem',
name='url_append',
field=models.CharField(help_text="Use this to optionally append a #hash or querystring to the above page's URL.", max_length=255, verbose_name='Append to URL', blank=True),
),
]
|
f28ece27ad325a17be20963cd30d91b9ddc913aa
|
winworker/create/files/utilities/add_git_credentials.py
|
winworker/create/files/utilities/add_git_credentials.py
|
#!/usr/bin/python
import sys
import os.path
if not len(sys.argv) == 2:
sys.exit(1)
credentials_file = os.path.expanduser('~') + '.git-credentials'
credentials = sys.argv[1]
with open(credentials_file, 'ab') as f:
f.write(credentials + '\n')
|
#!/usr/bin/python
import sys
import os.path
if not len(sys.argv) == 2:
sys.exit(1)
credentials_file = os.path.expanduser('~') + '\\.git-credentials'
credentials = sys.argv[1]
with open(credentials_file, 'ab') as f:
f.write(credentials + '\n')
|
Fix a path error when copying git credentialswq
|
Fix a path error when copying git credentialswq
|
Python
|
mit
|
hicknhack-software/ansible-buildbot
|
#!/usr/bin/python
import sys
import os.path
if not len(sys.argv) == 2:
sys.exit(1)
credentials_file = os.path.expanduser('~') + '.git-credentials'
credentials = sys.argv[1]
with open(credentials_file, 'ab') as f:
f.write(credentials + '\n')Fix a path error when copying git credentialswq
|
#!/usr/bin/python
import sys
import os.path
if not len(sys.argv) == 2:
sys.exit(1)
credentials_file = os.path.expanduser('~') + '\\.git-credentials'
credentials = sys.argv[1]
with open(credentials_file, 'ab') as f:
f.write(credentials + '\n')
|
<commit_before>#!/usr/bin/python
import sys
import os.path
if not len(sys.argv) == 2:
sys.exit(1)
credentials_file = os.path.expanduser('~') + '.git-credentials'
credentials = sys.argv[1]
with open(credentials_file, 'ab') as f:
f.write(credentials + '\n')<commit_msg>Fix a path error when copying git credentialswq<commit_after>
|
#!/usr/bin/python
import sys
import os.path
if not len(sys.argv) == 2:
sys.exit(1)
credentials_file = os.path.expanduser('~') + '\\.git-credentials'
credentials = sys.argv[1]
with open(credentials_file, 'ab') as f:
f.write(credentials + '\n')
|
#!/usr/bin/python
import sys
import os.path
if not len(sys.argv) == 2:
sys.exit(1)
credentials_file = os.path.expanduser('~') + '.git-credentials'
credentials = sys.argv[1]
with open(credentials_file, 'ab') as f:
f.write(credentials + '\n')Fix a path error when copying git credentialswq#!/usr/bin/python
import sys
import os.path
if not len(sys.argv) == 2:
sys.exit(1)
credentials_file = os.path.expanduser('~') + '\\.git-credentials'
credentials = sys.argv[1]
with open(credentials_file, 'ab') as f:
f.write(credentials + '\n')
|
<commit_before>#!/usr/bin/python
import sys
import os.path
if not len(sys.argv) == 2:
sys.exit(1)
credentials_file = os.path.expanduser('~') + '.git-credentials'
credentials = sys.argv[1]
with open(credentials_file, 'ab') as f:
f.write(credentials + '\n')<commit_msg>Fix a path error when copying git credentialswq<commit_after>#!/usr/bin/python
import sys
import os.path
if not len(sys.argv) == 2:
sys.exit(1)
credentials_file = os.path.expanduser('~') + '\\.git-credentials'
credentials = sys.argv[1]
with open(credentials_file, 'ab') as f:
f.write(credentials + '\n')
|
7dc6f71c758b1735eaee4bde9ac5cc2c3ff26e15
|
memegen/routes/latest.py
|
memegen/routes/latest.py
|
from flask import Blueprint, render_template
from flask_api.decorators import set_renderers
from flask_api.renderers import HTMLRenderer
from ._common import route, get_tid
blueprint = Blueprint('latest', __name__, url_prefix="/latest")
@blueprint.route("")
@set_renderers(HTMLRenderer)
def get():
return render_template(
'latest.html',
srcs=[route('image.get_latest', index=i + 1) for i in range(9)],
refresh=1,
ga_tid=get_tid(),
)
|
from flask import Blueprint, render_template
from flask_api.decorators import set_renderers
from flask_api.renderers import HTMLRenderer
from ._common import route, get_tid
blueprint = Blueprint('latest', __name__, url_prefix="/latest")
@blueprint.route("")
@set_renderers(HTMLRenderer)
def get():
return render_template(
'latest.html',
srcs=[route('image.get_latest', index=i + 1) for i in range(9)],
refresh=5,
ga_tid=get_tid(),
)
|
Set refresh rate to 5 seconds
|
Set refresh rate to 5 seconds
|
Python
|
mit
|
DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen
|
from flask import Blueprint, render_template
from flask_api.decorators import set_renderers
from flask_api.renderers import HTMLRenderer
from ._common import route, get_tid
blueprint = Blueprint('latest', __name__, url_prefix="/latest")
@blueprint.route("")
@set_renderers(HTMLRenderer)
def get():
return render_template(
'latest.html',
srcs=[route('image.get_latest', index=i + 1) for i in range(9)],
refresh=1,
ga_tid=get_tid(),
)
Set refresh rate to 5 seconds
|
from flask import Blueprint, render_template
from flask_api.decorators import set_renderers
from flask_api.renderers import HTMLRenderer
from ._common import route, get_tid
blueprint = Blueprint('latest', __name__, url_prefix="/latest")
@blueprint.route("")
@set_renderers(HTMLRenderer)
def get():
return render_template(
'latest.html',
srcs=[route('image.get_latest', index=i + 1) for i in range(9)],
refresh=5,
ga_tid=get_tid(),
)
|
<commit_before>from flask import Blueprint, render_template
from flask_api.decorators import set_renderers
from flask_api.renderers import HTMLRenderer
from ._common import route, get_tid
blueprint = Blueprint('latest', __name__, url_prefix="/latest")
@blueprint.route("")
@set_renderers(HTMLRenderer)
def get():
return render_template(
'latest.html',
srcs=[route('image.get_latest', index=i + 1) for i in range(9)],
refresh=1,
ga_tid=get_tid(),
)
<commit_msg>Set refresh rate to 5 seconds<commit_after>
|
from flask import Blueprint, render_template
from flask_api.decorators import set_renderers
from flask_api.renderers import HTMLRenderer
from ._common import route, get_tid
blueprint = Blueprint('latest', __name__, url_prefix="/latest")
@blueprint.route("")
@set_renderers(HTMLRenderer)
def get():
return render_template(
'latest.html',
srcs=[route('image.get_latest', index=i + 1) for i in range(9)],
refresh=5,
ga_tid=get_tid(),
)
|
from flask import Blueprint, render_template
from flask_api.decorators import set_renderers
from flask_api.renderers import HTMLRenderer
from ._common import route, get_tid
blueprint = Blueprint('latest', __name__, url_prefix="/latest")
@blueprint.route("")
@set_renderers(HTMLRenderer)
def get():
return render_template(
'latest.html',
srcs=[route('image.get_latest', index=i + 1) for i in range(9)],
refresh=1,
ga_tid=get_tid(),
)
Set refresh rate to 5 secondsfrom flask import Blueprint, render_template
from flask_api.decorators import set_renderers
from flask_api.renderers import HTMLRenderer
from ._common import route, get_tid
blueprint = Blueprint('latest', __name__, url_prefix="/latest")
@blueprint.route("")
@set_renderers(HTMLRenderer)
def get():
return render_template(
'latest.html',
srcs=[route('image.get_latest', index=i + 1) for i in range(9)],
refresh=5,
ga_tid=get_tid(),
)
|
<commit_before>from flask import Blueprint, render_template
from flask_api.decorators import set_renderers
from flask_api.renderers import HTMLRenderer
from ._common import route, get_tid
blueprint = Blueprint('latest', __name__, url_prefix="/latest")
@blueprint.route("")
@set_renderers(HTMLRenderer)
def get():
return render_template(
'latest.html',
srcs=[route('image.get_latest', index=i + 1) for i in range(9)],
refresh=1,
ga_tid=get_tid(),
)
<commit_msg>Set refresh rate to 5 seconds<commit_after>from flask import Blueprint, render_template
from flask_api.decorators import set_renderers
from flask_api.renderers import HTMLRenderer
from ._common import route, get_tid
blueprint = Blueprint('latest', __name__, url_prefix="/latest")
@blueprint.route("")
@set_renderers(HTMLRenderer)
def get():
return render_template(
'latest.html',
srcs=[route('image.get_latest', index=i + 1) for i in range(9)],
refresh=5,
ga_tid=get_tid(),
)
|
01df07dbdd38cc0166c75592c7f8508b5618dc21
|
adrian/cgen/_generator.py
|
adrian/cgen/_generator.py
|
from . import errors
from . import objects
from . import _context
from . import _layers
class NodeGenerator(_layers.Layer):
@_layers.register(objects.Val)
def _val(self, val):
if isinstance(val.type_, tuple(map(type, (
objects.CTypes.int_fast8, objects.CTypes.int_fast32,
objects.CTypes.int_fast64, objects.CTypes.uint_fast8,
objects.CTypes.uint_fast32, objects.CTypes.uint_fast64)))):
return val.literal
elif isinstance(val.type_, type(objects.CTypes.char)):
return "'" + val.literal + "'"
errors.not_implemented()
@_layers.register(objects.Var)
def _var(self, var):
return var.name
def generate(self, node):
return self.get_registry()[node](node)
|
from . import errors
from . import objects
from . import _context
from . import _layers
class NodeGenerator(_layers.Layer):
def _type(self, type_):
# TODO: implement
# return "int"
errors.not_implemented()
def _expr(self, expr):
if isinstance(expr, objects.Val):
return self._val(expr)
elif isinstance(expr, objects.Var):
return self._var(expr)
errors.not_implemented()
@_layers.register(objects.Val)
def _val(self, val):
if isinstance(val.type_, tuple(map(type, (
objects.CTypes.int_fast8, objects.CTypes.int_fast32,
objects.CTypes.int_fast64, objects.CTypes.uint_fast8,
objects.CTypes.uint_fast32, objects.CTypes.uint_fast64)))):
return val.literal
elif isinstance(val.type_, type(objects.CTypes.char)):
return "'" + val.literal + "'"
errors.not_implemented()
@_layers.register(objects.Var)
def _var(self, var):
return var.name
@_layers.register(objects.Decl)
def _decl(self, decl):
return " ".join([
self._type(decl.type_),
decl.name,
"=",
"".join([self._expr(decl.expr), ";"])
])
def generate(self, node):
return self.get_registry()[node](node)
|
Add high-level decl translation function without low-level functions' implementations
|
Add high-level decl translation function without low-level functions' implementations
|
Python
|
bsd-3-clause
|
adrian-lang/adrian.cgen
|
from . import errors
from . import objects
from . import _context
from . import _layers
class NodeGenerator(_layers.Layer):
@_layers.register(objects.Val)
def _val(self, val):
if isinstance(val.type_, tuple(map(type, (
objects.CTypes.int_fast8, objects.CTypes.int_fast32,
objects.CTypes.int_fast64, objects.CTypes.uint_fast8,
objects.CTypes.uint_fast32, objects.CTypes.uint_fast64)))):
return val.literal
elif isinstance(val.type_, type(objects.CTypes.char)):
return "'" + val.literal + "'"
errors.not_implemented()
@_layers.register(objects.Var)
def _var(self, var):
return var.name
def generate(self, node):
return self.get_registry()[node](node)
Add high-level decl translation function without low-level functions' implementations
|
from . import errors
from . import objects
from . import _context
from . import _layers
class NodeGenerator(_layers.Layer):
def _type(self, type_):
# TODO: implement
# return "int"
errors.not_implemented()
def _expr(self, expr):
if isinstance(expr, objects.Val):
return self._val(expr)
elif isinstance(expr, objects.Var):
return self._var(expr)
errors.not_implemented()
@_layers.register(objects.Val)
def _val(self, val):
if isinstance(val.type_, tuple(map(type, (
objects.CTypes.int_fast8, objects.CTypes.int_fast32,
objects.CTypes.int_fast64, objects.CTypes.uint_fast8,
objects.CTypes.uint_fast32, objects.CTypes.uint_fast64)))):
return val.literal
elif isinstance(val.type_, type(objects.CTypes.char)):
return "'" + val.literal + "'"
errors.not_implemented()
@_layers.register(objects.Var)
def _var(self, var):
return var.name
@_layers.register(objects.Decl)
def _decl(self, decl):
return " ".join([
self._type(decl.type_),
decl.name,
"=",
"".join([self._expr(decl.expr), ";"])
])
def generate(self, node):
return self.get_registry()[node](node)
|
<commit_before>from . import errors
from . import objects
from . import _context
from . import _layers
class NodeGenerator(_layers.Layer):
@_layers.register(objects.Val)
def _val(self, val):
if isinstance(val.type_, tuple(map(type, (
objects.CTypes.int_fast8, objects.CTypes.int_fast32,
objects.CTypes.int_fast64, objects.CTypes.uint_fast8,
objects.CTypes.uint_fast32, objects.CTypes.uint_fast64)))):
return val.literal
elif isinstance(val.type_, type(objects.CTypes.char)):
return "'" + val.literal + "'"
errors.not_implemented()
@_layers.register(objects.Var)
def _var(self, var):
return var.name
def generate(self, node):
return self.get_registry()[node](node)
<commit_msg>Add high-level decl translation function without low-level functions' implementations<commit_after>
|
from . import errors
from . import objects
from . import _context
from . import _layers
class NodeGenerator(_layers.Layer):
def _type(self, type_):
# TODO: implement
# return "int"
errors.not_implemented()
def _expr(self, expr):
if isinstance(expr, objects.Val):
return self._val(expr)
elif isinstance(expr, objects.Var):
return self._var(expr)
errors.not_implemented()
@_layers.register(objects.Val)
def _val(self, val):
if isinstance(val.type_, tuple(map(type, (
objects.CTypes.int_fast8, objects.CTypes.int_fast32,
objects.CTypes.int_fast64, objects.CTypes.uint_fast8,
objects.CTypes.uint_fast32, objects.CTypes.uint_fast64)))):
return val.literal
elif isinstance(val.type_, type(objects.CTypes.char)):
return "'" + val.literal + "'"
errors.not_implemented()
@_layers.register(objects.Var)
def _var(self, var):
return var.name
@_layers.register(objects.Decl)
def _decl(self, decl):
return " ".join([
self._type(decl.type_),
decl.name,
"=",
"".join([self._expr(decl.expr), ";"])
])
def generate(self, node):
return self.get_registry()[node](node)
|
from . import errors
from . import objects
from . import _context
from . import _layers
class NodeGenerator(_layers.Layer):
@_layers.register(objects.Val)
def _val(self, val):
if isinstance(val.type_, tuple(map(type, (
objects.CTypes.int_fast8, objects.CTypes.int_fast32,
objects.CTypes.int_fast64, objects.CTypes.uint_fast8,
objects.CTypes.uint_fast32, objects.CTypes.uint_fast64)))):
return val.literal
elif isinstance(val.type_, type(objects.CTypes.char)):
return "'" + val.literal + "'"
errors.not_implemented()
@_layers.register(objects.Var)
def _var(self, var):
return var.name
def generate(self, node):
return self.get_registry()[node](node)
Add high-level decl translation function without low-level functions' implementationsfrom . import errors
from . import objects
from . import _context
from . import _layers
class NodeGenerator(_layers.Layer):
def _type(self, type_):
# TODO: implement
# return "int"
errors.not_implemented()
def _expr(self, expr):
if isinstance(expr, objects.Val):
return self._val(expr)
elif isinstance(expr, objects.Var):
return self._var(expr)
errors.not_implemented()
@_layers.register(objects.Val)
def _val(self, val):
if isinstance(val.type_, tuple(map(type, (
objects.CTypes.int_fast8, objects.CTypes.int_fast32,
objects.CTypes.int_fast64, objects.CTypes.uint_fast8,
objects.CTypes.uint_fast32, objects.CTypes.uint_fast64)))):
return val.literal
elif isinstance(val.type_, type(objects.CTypes.char)):
return "'" + val.literal + "'"
errors.not_implemented()
@_layers.register(objects.Var)
def _var(self, var):
return var.name
@_layers.register(objects.Decl)
def _decl(self, decl):
return " ".join([
self._type(decl.type_),
decl.name,
"=",
"".join([self._expr(decl.expr), ";"])
])
def generate(self, node):
return self.get_registry()[node](node)
|
<commit_before>from . import errors
from . import objects
from . import _context
from . import _layers
class NodeGenerator(_layers.Layer):
@_layers.register(objects.Val)
def _val(self, val):
if isinstance(val.type_, tuple(map(type, (
objects.CTypes.int_fast8, objects.CTypes.int_fast32,
objects.CTypes.int_fast64, objects.CTypes.uint_fast8,
objects.CTypes.uint_fast32, objects.CTypes.uint_fast64)))):
return val.literal
elif isinstance(val.type_, type(objects.CTypes.char)):
return "'" + val.literal + "'"
errors.not_implemented()
@_layers.register(objects.Var)
def _var(self, var):
return var.name
def generate(self, node):
return self.get_registry()[node](node)
<commit_msg>Add high-level decl translation function without low-level functions' implementations<commit_after>from . import errors
from . import objects
from . import _context
from . import _layers
class NodeGenerator(_layers.Layer):
def _type(self, type_):
# TODO: implement
# return "int"
errors.not_implemented()
def _expr(self, expr):
if isinstance(expr, objects.Val):
return self._val(expr)
elif isinstance(expr, objects.Var):
return self._var(expr)
errors.not_implemented()
@_layers.register(objects.Val)
def _val(self, val):
if isinstance(val.type_, tuple(map(type, (
objects.CTypes.int_fast8, objects.CTypes.int_fast32,
objects.CTypes.int_fast64, objects.CTypes.uint_fast8,
objects.CTypes.uint_fast32, objects.CTypes.uint_fast64)))):
return val.literal
elif isinstance(val.type_, type(objects.CTypes.char)):
return "'" + val.literal + "'"
errors.not_implemented()
@_layers.register(objects.Var)
def _var(self, var):
return var.name
@_layers.register(objects.Decl)
def _decl(self, decl):
return " ".join([
self._type(decl.type_),
decl.name,
"=",
"".join([self._expr(decl.expr), ";"])
])
def generate(self, node):
return self.get_registry()[node](node)
|
3a08414e02ae2e4d39951df21d16c62a6fab2a5c
|
dthm4kaiako/config/__init__.py
|
dthm4kaiako/config/__init__.py
|
"""Configuration for Django system."""
__version__ = "0.15.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.15.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
Increment version number to 0.15.2
|
Increment version number to 0.15.2
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
"""Configuration for Django system."""
__version__ = "0.15.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.15.2
|
"""Configuration for Django system."""
__version__ = "0.15.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.15.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.15.2<commit_after>
|
"""Configuration for Django system."""
__version__ = "0.15.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.15.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.15.2"""Configuration for Django system."""
__version__ = "0.15.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.15.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.15.2<commit_after>"""Configuration for Django system."""
__version__ = "0.15.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
6b73de9fea31b7a5176601d7f19370291ba4e130
|
tests/test_transpiler.py
|
tests/test_transpiler.py
|
import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
|
import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
|
Make transpiler test remove files if they already exist
|
Make transpiler test remove files if they already exist
|
Python
|
mit
|
WesleyAC/lemonscript-transpiler,WesleyAC/lemonscript-transpiler,WesleyAC/lemonscript-transpiler
|
import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
Make transpiler test remove files if they already exist
|
import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
|
<commit_before>import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
<commit_msg>Make transpiler test remove files if they already exist<commit_after>
|
import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
|
import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
Make transpiler test remove files if they already existimport os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
|
<commit_before>import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
<commit_msg>Make transpiler test remove files if they already exist<commit_after>import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
|
903a618cbde1f6d4c18a806e9bb8c3d17bc58b3b
|
flocker/control/test/test_script.py
|
flocker/control/test/test_script.py
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
from twisted.web.server import Site
from twisted.trial.unittest import SynchronousTestCase
from ..script import ControlOptions, ControlScript
from ...testtools import MemoryCoreReactor
class ControlOptionsTests():
"""
Tests for ``ControlOptions``.
"""
def test_default_port(self):
"""
The default port configured by ``ControlOptions`` is 4523.
"""
options = ControlOptions()
options.parseOptions([])
self.assertEqual(options["port"], 4523)
def test_custom_port(self):
"""
The ``--port`` command-line option allows configuring the port.
"""
options = ControlOptions()
options.parseOptions(["--port", 1234])
self.assertEqual(options["port"], 1234)
class ControlScriptTests(SynchronousTestCase):
"""
Tests for ``ControlScript``.
"""
def test_starts_http_api_server(self):
"""
``ControlScript.main`` starts a HTTP server on the given port.
"""
reactor = MemoryCoreReactor()
ControlScript().main(reactor, {"port": 8001})
server = reactor.tcpServers[0]
port = server[0]
factory = server[1].__class__
self.assertEqual((port, factory), (8001, Site))
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
from twisted.web.server import Site
from twisted.trial.unittest import SynchronousTestCase
from ..script import ControlOptions, ControlScript
from ...testtools import MemoryCoreReactor, StandardOptionsTestsMixin
class ControlOptionsTests(StandardOptionsTestsMixin,
SynchronousTestCase):
"""
Tests for ``ControlOptions``.
"""
options = ControlOptions
def test_default_port(self):
"""
The default port configured by ``ControlOptions`` is 4523.
"""
options = ControlOptions()
options.parseOptions([])
self.assertEqual(options["port"], 4523)
def test_custom_port(self):
"""
The ``--port`` command-line option allows configuring the port.
"""
options = ControlOptions()
options.parseOptions(["--port", 1234])
self.assertEqual(options["port"], 1234)
class ControlScriptEffectsTests(SynchronousTestCase):
"""
Tests for effects ``ControlScript``.
"""
def test_starts_http_api_server(self):
"""
``ControlScript.main`` starts a HTTP server on the given port.
"""
reactor = MemoryCoreReactor()
ControlScript().main(reactor, {"port": 8001})
server = reactor.tcpServers[0]
port = server[0]
factory = server[1].__class__
self.assertEqual((port, factory), (8001, Site))
|
Make sure options tests run.
|
Make sure options tests run.
|
Python
|
apache-2.0
|
1d4Nf6/flocker,runcom/flocker,w4ngyi/flocker,lukemarsden/flocker,agonzalezro/flocker,w4ngyi/flocker,hackday-profilers/flocker,achanda/flocker,mbrukman/flocker,runcom/flocker,jml/flocker,1d4Nf6/flocker,achanda/flocker,adamtheturtle/flocker,agonzalezro/flocker,Azulinho/flocker,lukemarsden/flocker,moypray/flocker,1d4Nf6/flocker,LaynePeng/flocker,hackday-profilers/flocker,hackday-profilers/flocker,achanda/flocker,mbrukman/flocker,wallnerryan/flocker-profiles,Azulinho/flocker,agonzalezro/flocker,w4ngyi/flocker,LaynePeng/flocker,AndyHuu/flocker,jml/flocker,moypray/flocker,adamtheturtle/flocker,jml/flocker,Azulinho/flocker,LaynePeng/flocker,moypray/flocker,AndyHuu/flocker,wallnerryan/flocker-profiles,mbrukman/flocker,AndyHuu/flocker,runcom/flocker,wallnerryan/flocker-profiles,adamtheturtle/flocker,lukemarsden/flocker
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
from twisted.web.server import Site
from twisted.trial.unittest import SynchronousTestCase
from ..script import ControlOptions, ControlScript
from ...testtools import MemoryCoreReactor
class ControlOptionsTests():
"""
Tests for ``ControlOptions``.
"""
def test_default_port(self):
"""
The default port configured by ``ControlOptions`` is 4523.
"""
options = ControlOptions()
options.parseOptions([])
self.assertEqual(options["port"], 4523)
def test_custom_port(self):
"""
The ``--port`` command-line option allows configuring the port.
"""
options = ControlOptions()
options.parseOptions(["--port", 1234])
self.assertEqual(options["port"], 1234)
class ControlScriptTests(SynchronousTestCase):
"""
Tests for ``ControlScript``.
"""
def test_starts_http_api_server(self):
"""
``ControlScript.main`` starts a HTTP server on the given port.
"""
reactor = MemoryCoreReactor()
ControlScript().main(reactor, {"port": 8001})
server = reactor.tcpServers[0]
port = server[0]
factory = server[1].__class__
self.assertEqual((port, factory), (8001, Site))
Make sure options tests run.
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
from twisted.web.server import Site
from twisted.trial.unittest import SynchronousTestCase
from ..script import ControlOptions, ControlScript
from ...testtools import MemoryCoreReactor, StandardOptionsTestsMixin
class ControlOptionsTests(StandardOptionsTestsMixin,
SynchronousTestCase):
"""
Tests for ``ControlOptions``.
"""
options = ControlOptions
def test_default_port(self):
"""
The default port configured by ``ControlOptions`` is 4523.
"""
options = ControlOptions()
options.parseOptions([])
self.assertEqual(options["port"], 4523)
def test_custom_port(self):
"""
The ``--port`` command-line option allows configuring the port.
"""
options = ControlOptions()
options.parseOptions(["--port", 1234])
self.assertEqual(options["port"], 1234)
class ControlScriptEffectsTests(SynchronousTestCase):
"""
Tests for effects ``ControlScript``.
"""
def test_starts_http_api_server(self):
"""
``ControlScript.main`` starts a HTTP server on the given port.
"""
reactor = MemoryCoreReactor()
ControlScript().main(reactor, {"port": 8001})
server = reactor.tcpServers[0]
port = server[0]
factory = server[1].__class__
self.assertEqual((port, factory), (8001, Site))
|
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details.
from twisted.web.server import Site
from twisted.trial.unittest import SynchronousTestCase
from ..script import ControlOptions, ControlScript
from ...testtools import MemoryCoreReactor
class ControlOptionsTests():
"""
Tests for ``ControlOptions``.
"""
def test_default_port(self):
"""
The default port configured by ``ControlOptions`` is 4523.
"""
options = ControlOptions()
options.parseOptions([])
self.assertEqual(options["port"], 4523)
def test_custom_port(self):
"""
The ``--port`` command-line option allows configuring the port.
"""
options = ControlOptions()
options.parseOptions(["--port", 1234])
self.assertEqual(options["port"], 1234)
class ControlScriptTests(SynchronousTestCase):
"""
Tests for ``ControlScript``.
"""
def test_starts_http_api_server(self):
"""
``ControlScript.main`` starts a HTTP server on the given port.
"""
reactor = MemoryCoreReactor()
ControlScript().main(reactor, {"port": 8001})
server = reactor.tcpServers[0]
port = server[0]
factory = server[1].__class__
self.assertEqual((port, factory), (8001, Site))
<commit_msg>Make sure options tests run.<commit_after>
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
from twisted.web.server import Site
from twisted.trial.unittest import SynchronousTestCase
from ..script import ControlOptions, ControlScript
from ...testtools import MemoryCoreReactor, StandardOptionsTestsMixin
class ControlOptionsTests(StandardOptionsTestsMixin,
SynchronousTestCase):
"""
Tests for ``ControlOptions``.
"""
options = ControlOptions
def test_default_port(self):
"""
The default port configured by ``ControlOptions`` is 4523.
"""
options = ControlOptions()
options.parseOptions([])
self.assertEqual(options["port"], 4523)
def test_custom_port(self):
"""
The ``--port`` command-line option allows configuring the port.
"""
options = ControlOptions()
options.parseOptions(["--port", 1234])
self.assertEqual(options["port"], 1234)
class ControlScriptEffectsTests(SynchronousTestCase):
"""
Tests for effects ``ControlScript``.
"""
def test_starts_http_api_server(self):
"""
``ControlScript.main`` starts a HTTP server on the given port.
"""
reactor = MemoryCoreReactor()
ControlScript().main(reactor, {"port": 8001})
server = reactor.tcpServers[0]
port = server[0]
factory = server[1].__class__
self.assertEqual((port, factory), (8001, Site))
|
# Copyright Hybrid Logic Ltd. See LICENSE file for details.
from twisted.web.server import Site
from twisted.trial.unittest import SynchronousTestCase
from ..script import ControlOptions, ControlScript
from ...testtools import MemoryCoreReactor
class ControlOptionsTests():
"""
Tests for ``ControlOptions``.
"""
def test_default_port(self):
"""
The default port configured by ``ControlOptions`` is 4523.
"""
options = ControlOptions()
options.parseOptions([])
self.assertEqual(options["port"], 4523)
def test_custom_port(self):
"""
The ``--port`` command-line option allows configuring the port.
"""
options = ControlOptions()
options.parseOptions(["--port", 1234])
self.assertEqual(options["port"], 1234)
class ControlScriptTests(SynchronousTestCase):
"""
Tests for ``ControlScript``.
"""
def test_starts_http_api_server(self):
"""
``ControlScript.main`` starts a HTTP server on the given port.
"""
reactor = MemoryCoreReactor()
ControlScript().main(reactor, {"port": 8001})
server = reactor.tcpServers[0]
port = server[0]
factory = server[1].__class__
self.assertEqual((port, factory), (8001, Site))
Make sure options tests run.# Copyright Hybrid Logic Ltd. See LICENSE file for details.
from twisted.web.server import Site
from twisted.trial.unittest import SynchronousTestCase
from ..script import ControlOptions, ControlScript
from ...testtools import MemoryCoreReactor, StandardOptionsTestsMixin
class ControlOptionsTests(StandardOptionsTestsMixin,
SynchronousTestCase):
"""
Tests for ``ControlOptions``.
"""
options = ControlOptions
def test_default_port(self):
"""
The default port configured by ``ControlOptions`` is 4523.
"""
options = ControlOptions()
options.parseOptions([])
self.assertEqual(options["port"], 4523)
def test_custom_port(self):
"""
The ``--port`` command-line option allows configuring the port.
"""
options = ControlOptions()
options.parseOptions(["--port", 1234])
self.assertEqual(options["port"], 1234)
class ControlScriptEffectsTests(SynchronousTestCase):
"""
Tests for effects ``ControlScript``.
"""
def test_starts_http_api_server(self):
"""
``ControlScript.main`` starts a HTTP server on the given port.
"""
reactor = MemoryCoreReactor()
ControlScript().main(reactor, {"port": 8001})
server = reactor.tcpServers[0]
port = server[0]
factory = server[1].__class__
self.assertEqual((port, factory), (8001, Site))
|
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details.
from twisted.web.server import Site
from twisted.trial.unittest import SynchronousTestCase
from ..script import ControlOptions, ControlScript
from ...testtools import MemoryCoreReactor
class ControlOptionsTests():
"""
Tests for ``ControlOptions``.
"""
def test_default_port(self):
"""
The default port configured by ``ControlOptions`` is 4523.
"""
options = ControlOptions()
options.parseOptions([])
self.assertEqual(options["port"], 4523)
def test_custom_port(self):
"""
The ``--port`` command-line option allows configuring the port.
"""
options = ControlOptions()
options.parseOptions(["--port", 1234])
self.assertEqual(options["port"], 1234)
class ControlScriptTests(SynchronousTestCase):
"""
Tests for ``ControlScript``.
"""
def test_starts_http_api_server(self):
"""
``ControlScript.main`` starts a HTTP server on the given port.
"""
reactor = MemoryCoreReactor()
ControlScript().main(reactor, {"port": 8001})
server = reactor.tcpServers[0]
port = server[0]
factory = server[1].__class__
self.assertEqual((port, factory), (8001, Site))
<commit_msg>Make sure options tests run.<commit_after># Copyright Hybrid Logic Ltd. See LICENSE file for details.
from twisted.web.server import Site
from twisted.trial.unittest import SynchronousTestCase
from ..script import ControlOptions, ControlScript
from ...testtools import MemoryCoreReactor, StandardOptionsTestsMixin
class ControlOptionsTests(StandardOptionsTestsMixin,
SynchronousTestCase):
"""
Tests for ``ControlOptions``.
"""
options = ControlOptions
def test_default_port(self):
"""
The default port configured by ``ControlOptions`` is 4523.
"""
options = ControlOptions()
options.parseOptions([])
self.assertEqual(options["port"], 4523)
def test_custom_port(self):
"""
The ``--port`` command-line option allows configuring the port.
"""
options = ControlOptions()
options.parseOptions(["--port", 1234])
self.assertEqual(options["port"], 1234)
class ControlScriptEffectsTests(SynchronousTestCase):
"""
Tests for effects ``ControlScript``.
"""
def test_starts_http_api_server(self):
"""
``ControlScript.main`` starts a HTTP server on the given port.
"""
reactor = MemoryCoreReactor()
ControlScript().main(reactor, {"port": 8001})
server = reactor.tcpServers[0]
port = server[0]
factory = server[1].__class__
self.assertEqual((port, factory), (8001, Site))
|
6642c377d579a8401eb5827f25a1aaf6ab117921
|
tests/benchmark/plugins/clear_buffer_cache.py
|
tests/benchmark/plugins/clear_buffer_cache.py
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.util.cluster_controller import ClusterController
from tests.benchmark.plugins import Plugin
class ClearBufferCache(Plugin):
"""Plugin that clears the buffer cache before a query is run."""
__name__ = "ClearBufferCache"
def __init__(self, *args, **kwargs):
self.cluster_controller = ClusterController(*args, **kwargs)
Plugin.__init__(self, *args, **kwargs)
def run_pre_hook(self, context=None):
cmd = "sysctl -w vm.drop_caches=3 vm.drop_caches=0"
self.cluster_controller.run_cmd(cmd)
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.util.cluster_controller import ClusterController
from tests.benchmark.plugins import Plugin
class ClearBufferCache(Plugin):
"""Plugin that clears the buffer cache before a query is run."""
__name__ = "ClearBufferCache"
def __init__(self, *args, **kwargs):
self.cluster_controller = ClusterController(*args, **kwargs)
Plugin.__init__(self, *args, **kwargs)
def run_pre_hook(self, context=None):
# Drop the page cache (drop_caches=1). We'll leave the inodes and dentries
# since that is not what we are testing and it causes excessive performance
# variability.
cmd = "sysctl -w vm.drop_caches=1 vm.drop_caches=0"
self.cluster_controller.run_cmd(cmd)
|
Update clear buffer cache plugin to only flush page cache.
|
Update clear buffer cache plugin to only flush page cache.
More detail: http://linux-mm.org/Drop_Caches
Change-Id: I7fa675ccdc81f375d88e9cfab330fca3bc983ec8
Reviewed-on: http://gerrit.ent.cloudera.com:8080/1157
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com>
Reviewed-by: Lenni Kuff <724b7df200764f5dc1b723c05ee6c6adabd11bb1@cloudera.com>
Tested-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>
|
Python
|
apache-2.0
|
cloudera/Impala,cloudera/Impala,cloudera/Impala,cloudera/Impala,cloudera/Impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,michaelhkw/incubator-impala,cloudera/Impala,cloudera/Impala,michaelhkw/incubator-impala
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.util.cluster_controller import ClusterController
from tests.benchmark.plugins import Plugin
class ClearBufferCache(Plugin):
"""Plugin that clears the buffer cache before a query is run."""
__name__ = "ClearBufferCache"
def __init__(self, *args, **kwargs):
self.cluster_controller = ClusterController(*args, **kwargs)
Plugin.__init__(self, *args, **kwargs)
def run_pre_hook(self, context=None):
cmd = "sysctl -w vm.drop_caches=3 vm.drop_caches=0"
self.cluster_controller.run_cmd(cmd)
Update clear buffer cache plugin to only flush page cache.
More detail: http://linux-mm.org/Drop_Caches
Change-Id: I7fa675ccdc81f375d88e9cfab330fca3bc983ec8
Reviewed-on: http://gerrit.ent.cloudera.com:8080/1157
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com>
Reviewed-by: Lenni Kuff <724b7df200764f5dc1b723c05ee6c6adabd11bb1@cloudera.com>
Tested-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.util.cluster_controller import ClusterController
from tests.benchmark.plugins import Plugin
class ClearBufferCache(Plugin):
"""Plugin that clears the buffer cache before a query is run."""
__name__ = "ClearBufferCache"
def __init__(self, *args, **kwargs):
self.cluster_controller = ClusterController(*args, **kwargs)
Plugin.__init__(self, *args, **kwargs)
def run_pre_hook(self, context=None):
# Drop the page cache (drop_caches=1). We'll leave the inodes and dentries
# since that is not what we are testing and it causes excessive performance
# variability.
cmd = "sysctl -w vm.drop_caches=1 vm.drop_caches=0"
self.cluster_controller.run_cmd(cmd)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.util.cluster_controller import ClusterController
from tests.benchmark.plugins import Plugin
class ClearBufferCache(Plugin):
"""Plugin that clears the buffer cache before a query is run."""
__name__ = "ClearBufferCache"
def __init__(self, *args, **kwargs):
self.cluster_controller = ClusterController(*args, **kwargs)
Plugin.__init__(self, *args, **kwargs)
def run_pre_hook(self, context=None):
cmd = "sysctl -w vm.drop_caches=3 vm.drop_caches=0"
self.cluster_controller.run_cmd(cmd)
<commit_msg>Update clear buffer cache plugin to only flush page cache.
More detail: http://linux-mm.org/Drop_Caches
Change-Id: I7fa675ccdc81f375d88e9cfab330fca3bc983ec8
Reviewed-on: http://gerrit.ent.cloudera.com:8080/1157
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com>
Reviewed-by: Lenni Kuff <724b7df200764f5dc1b723c05ee6c6adabd11bb1@cloudera.com>
Tested-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com><commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.util.cluster_controller import ClusterController
from tests.benchmark.plugins import Plugin
class ClearBufferCache(Plugin):
"""Plugin that clears the buffer cache before a query is run."""
__name__ = "ClearBufferCache"
def __init__(self, *args, **kwargs):
self.cluster_controller = ClusterController(*args, **kwargs)
Plugin.__init__(self, *args, **kwargs)
def run_pre_hook(self, context=None):
# Drop the page cache (drop_caches=1). We'll leave the inodes and dentries
# since that is not what we are testing and it causes excessive performance
# variability.
cmd = "sysctl -w vm.drop_caches=1 vm.drop_caches=0"
self.cluster_controller.run_cmd(cmd)
|
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.util.cluster_controller import ClusterController
from tests.benchmark.plugins import Plugin
class ClearBufferCache(Plugin):
"""Plugin that clears the buffer cache before a query is run."""
__name__ = "ClearBufferCache"
def __init__(self, *args, **kwargs):
self.cluster_controller = ClusterController(*args, **kwargs)
Plugin.__init__(self, *args, **kwargs)
def run_pre_hook(self, context=None):
cmd = "sysctl -w vm.drop_caches=3 vm.drop_caches=0"
self.cluster_controller.run_cmd(cmd)
Update clear buffer cache plugin to only flush page cache.
More detail: http://linux-mm.org/Drop_Caches
Change-Id: I7fa675ccdc81f375d88e9cfab330fca3bc983ec8
Reviewed-on: http://gerrit.ent.cloudera.com:8080/1157
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com>
Reviewed-by: Lenni Kuff <724b7df200764f5dc1b723c05ee6c6adabd11bb1@cloudera.com>
Tested-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.util.cluster_controller import ClusterController
from tests.benchmark.plugins import Plugin
class ClearBufferCache(Plugin):
"""Plugin that clears the buffer cache before a query is run."""
__name__ = "ClearBufferCache"
def __init__(self, *args, **kwargs):
self.cluster_controller = ClusterController(*args, **kwargs)
Plugin.__init__(self, *args, **kwargs)
def run_pre_hook(self, context=None):
# Drop the page cache (drop_caches=1). We'll leave the inodes and dentries
# since that is not what we are testing and it causes excessive performance
# variability.
cmd = "sysctl -w vm.drop_caches=1 vm.drop_caches=0"
self.cluster_controller.run_cmd(cmd)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.util.cluster_controller import ClusterController
from tests.benchmark.plugins import Plugin
class ClearBufferCache(Plugin):
"""Plugin that clears the buffer cache before a query is run."""
__name__ = "ClearBufferCache"
def __init__(self, *args, **kwargs):
self.cluster_controller = ClusterController(*args, **kwargs)
Plugin.__init__(self, *args, **kwargs)
def run_pre_hook(self, context=None):
cmd = "sysctl -w vm.drop_caches=3 vm.drop_caches=0"
self.cluster_controller.run_cmd(cmd)
<commit_msg>Update clear buffer cache plugin to only flush page cache.
More detail: http://linux-mm.org/Drop_Caches
Change-Id: I7fa675ccdc81f375d88e9cfab330fca3bc983ec8
Reviewed-on: http://gerrit.ent.cloudera.com:8080/1157
Reviewed-by: Alex Behm <fe1626037acfc2dc542d2aa723a6d14f2464a20c@cloudera.com>
Reviewed-by: Lenni Kuff <724b7df200764f5dc1b723c05ee6c6adabd11bb1@cloudera.com>
Tested-by: Nong Li <99a5e5f8f5911755b88e0b536d46aafa102bed41@cloudera.com><commit_after>#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tests.util.cluster_controller import ClusterController
from tests.benchmark.plugins import Plugin
class ClearBufferCache(Plugin):
"""Plugin that clears the buffer cache before a query is run."""
__name__ = "ClearBufferCache"
def __init__(self, *args, **kwargs):
self.cluster_controller = ClusterController(*args, **kwargs)
Plugin.__init__(self, *args, **kwargs)
def run_pre_hook(self, context=None):
# Drop the page cache (drop_caches=1). We'll leave the inodes and dentries
# since that is not what we are testing and it causes excessive performance
# variability.
cmd = "sysctl -w vm.drop_caches=1 vm.drop_caches=0"
self.cluster_controller.run_cmd(cmd)
|
ceadcb80150278ae29fb60b339049f4c840c135d
|
astroquery/nist/tests/test_nist_remote.py
|
astroquery/nist/tests/test_nist_remote.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
import requests
import imp
from ... import nist
imp.reload(requests)
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
import numpy as np
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
from ... import nist
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
|
Add missing numpy import, and cleanup the rest
|
Add missing numpy import, and cleanup the rest
|
Python
|
bsd-3-clause
|
ceb8/astroquery,imbasimba/astroquery,ceb8/astroquery,imbasimba/astroquery
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
import requests
import imp
from ... import nist
imp.reload(requests)
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
Add missing numpy import, and cleanup the rest
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
import numpy as np
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
from ... import nist
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
import requests
import imp
from ... import nist
imp.reload(requests)
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
<commit_msg>Add missing numpy import, and cleanup the rest<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
import numpy as np
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
from ... import nist
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
import requests
import imp
from ... import nist
imp.reload(requests)
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
Add missing numpy import, and cleanup the rest# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
import numpy as np
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
from ... import nist
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
import requests
import imp
from ... import nist
imp.reload(requests)
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
<commit_msg>Add missing numpy import, and cleanup the rest<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import print_function
import numpy as np
from astropy.tests.helper import remote_data
from astropy.table import Table
import astropy.units as u
from ... import nist
@remote_data
class TestNist:
def test_query_async(self):
response = nist.core.Nist.query_async(4000 * u.nm, 7000 * u.nm)
assert response is not None
def test_query(self):
result = nist.core.Nist.query(4000 * u.nm, 7000 * u.nm)
assert isinstance(result, Table)
# check that no javascript was left in the table
# (regression test for 1355)
assert np.all(result['TP'] == 'T8637')
|
76728fcba7671575053620da9e1e26aaa279547a
|
awx/main/notifications/webhook_backend.py
|
awx/main/notifications/webhook_backend.py
|
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
|
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
from awx.main.utils import get_awx_version
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
|
Set a user agent for the webhook if not provided
|
Set a user agent for the webhook if not provided
|
Python
|
apache-2.0
|
wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx
|
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
Set a user agent for the webhook if not provided
|
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
from awx.main.utils import get_awx_version
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
|
<commit_before># Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
<commit_msg>Set a user agent for the webhook if not provided<commit_after>
|
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
from awx.main.utils import get_awx_version
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
|
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
Set a user agent for the webhook if not provided# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
from awx.main.utils import get_awx_version
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
|
<commit_before># Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
<commit_msg>Set a user agent for the webhook if not provided<commit_after># Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import logging
import requests
import json
from django.utils.encoding import smart_text
from awx.main.notifications.base import TowerBaseEmailBackend
from awx.main.utils import get_awx_version
logger = logging.getLogger('awx.main.notifications.webhook_backend')
class WebhookBackend(TowerBaseEmailBackend):
init_parameters = {"url": {"label": "Target URL", "type": "string"},
"headers": {"label": "HTTP Headers", "type": "object"}}
recipient_parameter = "url"
sender_parameter = None
def __init__(self, headers, fail_silently=False, **kwargs):
self.headers = headers
super(WebhookBackend, self).__init__(fail_silently=fail_silently)
def format_body(self, body):
return body
def send_messages(self, messages):
sent_messages = 0
if 'User-Agent' not in self.headers:
self.headers['User-Agent'] = "Tower {}".format(get_awx_version())
for m in messages:
r = requests.post("{}".format(m.recipients()[0]),
data=json.dumps(m.body),
headers=self.headers)
if r.status_code >= 400:
logger.error(smart_text("Error sending notification webhook: {}".format(r.text)))
if not self.fail_silently:
raise Exception(smart_text("Error sending notification webhook: {}".format(r.text)))
sent_messages += 1
return sent_messages
|
4630b898e37d0653baa22c98578eb06c82eebfe6
|
kobo/hub/admin.py
|
kobo/hub/admin.py
|
# -*- coding: utf-8 -*-
import django.contrib.admin as admin
from models import *
class TaskAdmin(admin.ModelAdmin):
list_display = ("id", "method", "label", "state", "owner", "dt_created", "dt_finished", "time", "arch", "channel")
list_filter = ("method", "state")
search_fields = ("id", "method", "label", "owner", "dt_created", "dt_finished")
raw_id_fields = ("parent", "owner", "resubmitted_by", "resubmitted_from")
class WorkerAdmin(admin.ModelAdmin):
list_display = ("name", "enabled", "ready", "max_load", "current_load", "task_count")
admin.site.register(Arch)
admin.site.register(Channel)
admin.site.register(Worker, WorkerAdmin)
admin.site.register(Task, TaskAdmin)
|
# -*- coding: utf-8 -*-
import django.contrib.admin as admin
from models import *
class TaskAdmin(admin.ModelAdmin):
list_display = ("id", "method", "label", "state", "owner", "dt_created", "dt_finished", "time", "arch", "channel")
list_filter = ("method", "state")
search_fields = ("id", "method", "label", "owner__username", "dt_created", "dt_finished")
raw_id_fields = ("parent", "owner", "resubmitted_by", "resubmitted_from")
class WorkerAdmin(admin.ModelAdmin):
list_display = ("name", "enabled", "ready", "max_load", "current_load", "task_count")
admin.site.register(Arch)
admin.site.register(Channel)
admin.site.register(Worker, WorkerAdmin)
admin.site.register(Task, TaskAdmin)
|
Fix TaskAdmin to search for user in correct db field.
|
Fix TaskAdmin to search for user in correct db field.
|
Python
|
lgpl-2.1
|
pombredanne/https-git.fedorahosted.org-git-kobo,release-engineering/kobo,pombredanne/https-git.fedorahosted.org-git-kobo,pombredanne/https-git.fedorahosted.org-git-kobo,release-engineering/kobo,release-engineering/kobo,release-engineering/kobo,pombredanne/https-git.fedorahosted.org-git-kobo
|
# -*- coding: utf-8 -*-
import django.contrib.admin as admin
from models import *
class TaskAdmin(admin.ModelAdmin):
list_display = ("id", "method", "label", "state", "owner", "dt_created", "dt_finished", "time", "arch", "channel")
list_filter = ("method", "state")
search_fields = ("id", "method", "label", "owner", "dt_created", "dt_finished")
raw_id_fields = ("parent", "owner", "resubmitted_by", "resubmitted_from")
class WorkerAdmin(admin.ModelAdmin):
list_display = ("name", "enabled", "ready", "max_load", "current_load", "task_count")
admin.site.register(Arch)
admin.site.register(Channel)
admin.site.register(Worker, WorkerAdmin)
admin.site.register(Task, TaskAdmin)
Fix TaskAdmin to search for user in correct db field.
|
# -*- coding: utf-8 -*-
import django.contrib.admin as admin
from models import *
class TaskAdmin(admin.ModelAdmin):
list_display = ("id", "method", "label", "state", "owner", "dt_created", "dt_finished", "time", "arch", "channel")
list_filter = ("method", "state")
search_fields = ("id", "method", "label", "owner__username", "dt_created", "dt_finished")
raw_id_fields = ("parent", "owner", "resubmitted_by", "resubmitted_from")
class WorkerAdmin(admin.ModelAdmin):
list_display = ("name", "enabled", "ready", "max_load", "current_load", "task_count")
admin.site.register(Arch)
admin.site.register(Channel)
admin.site.register(Worker, WorkerAdmin)
admin.site.register(Task, TaskAdmin)
|
<commit_before># -*- coding: utf-8 -*-
import django.contrib.admin as admin
from models import *
class TaskAdmin(admin.ModelAdmin):
list_display = ("id", "method", "label", "state", "owner", "dt_created", "dt_finished", "time", "arch", "channel")
list_filter = ("method", "state")
search_fields = ("id", "method", "label", "owner", "dt_created", "dt_finished")
raw_id_fields = ("parent", "owner", "resubmitted_by", "resubmitted_from")
class WorkerAdmin(admin.ModelAdmin):
list_display = ("name", "enabled", "ready", "max_load", "current_load", "task_count")
admin.site.register(Arch)
admin.site.register(Channel)
admin.site.register(Worker, WorkerAdmin)
admin.site.register(Task, TaskAdmin)
<commit_msg>Fix TaskAdmin to search for user in correct db field.<commit_after>
|
# -*- coding: utf-8 -*-
import django.contrib.admin as admin
from models import *
class TaskAdmin(admin.ModelAdmin):
list_display = ("id", "method", "label", "state", "owner", "dt_created", "dt_finished", "time", "arch", "channel")
list_filter = ("method", "state")
search_fields = ("id", "method", "label", "owner__username", "dt_created", "dt_finished")
raw_id_fields = ("parent", "owner", "resubmitted_by", "resubmitted_from")
class WorkerAdmin(admin.ModelAdmin):
list_display = ("name", "enabled", "ready", "max_load", "current_load", "task_count")
admin.site.register(Arch)
admin.site.register(Channel)
admin.site.register(Worker, WorkerAdmin)
admin.site.register(Task, TaskAdmin)
|
# -*- coding: utf-8 -*-
import django.contrib.admin as admin
from models import *
class TaskAdmin(admin.ModelAdmin):
list_display = ("id", "method", "label", "state", "owner", "dt_created", "dt_finished", "time", "arch", "channel")
list_filter = ("method", "state")
search_fields = ("id", "method", "label", "owner", "dt_created", "dt_finished")
raw_id_fields = ("parent", "owner", "resubmitted_by", "resubmitted_from")
class WorkerAdmin(admin.ModelAdmin):
list_display = ("name", "enabled", "ready", "max_load", "current_load", "task_count")
admin.site.register(Arch)
admin.site.register(Channel)
admin.site.register(Worker, WorkerAdmin)
admin.site.register(Task, TaskAdmin)
Fix TaskAdmin to search for user in correct db field.# -*- coding: utf-8 -*-
import django.contrib.admin as admin
from models import *
class TaskAdmin(admin.ModelAdmin):
list_display = ("id", "method", "label", "state", "owner", "dt_created", "dt_finished", "time", "arch", "channel")
list_filter = ("method", "state")
search_fields = ("id", "method", "label", "owner__username", "dt_created", "dt_finished")
raw_id_fields = ("parent", "owner", "resubmitted_by", "resubmitted_from")
class WorkerAdmin(admin.ModelAdmin):
list_display = ("name", "enabled", "ready", "max_load", "current_load", "task_count")
admin.site.register(Arch)
admin.site.register(Channel)
admin.site.register(Worker, WorkerAdmin)
admin.site.register(Task, TaskAdmin)
|
<commit_before># -*- coding: utf-8 -*-
import django.contrib.admin as admin
from models import *
class TaskAdmin(admin.ModelAdmin):
list_display = ("id", "method", "label", "state", "owner", "dt_created", "dt_finished", "time", "arch", "channel")
list_filter = ("method", "state")
search_fields = ("id", "method", "label", "owner", "dt_created", "dt_finished")
raw_id_fields = ("parent", "owner", "resubmitted_by", "resubmitted_from")
class WorkerAdmin(admin.ModelAdmin):
list_display = ("name", "enabled", "ready", "max_load", "current_load", "task_count")
admin.site.register(Arch)
admin.site.register(Channel)
admin.site.register(Worker, WorkerAdmin)
admin.site.register(Task, TaskAdmin)
<commit_msg>Fix TaskAdmin to search for user in correct db field.<commit_after># -*- coding: utf-8 -*-
import django.contrib.admin as admin
from models import *
class TaskAdmin(admin.ModelAdmin):
list_display = ("id", "method", "label", "state", "owner", "dt_created", "dt_finished", "time", "arch", "channel")
list_filter = ("method", "state")
search_fields = ("id", "method", "label", "owner__username", "dt_created", "dt_finished")
raw_id_fields = ("parent", "owner", "resubmitted_by", "resubmitted_from")
class WorkerAdmin(admin.ModelAdmin):
list_display = ("name", "enabled", "ready", "max_load", "current_load", "task_count")
admin.site.register(Arch)
admin.site.register(Channel)
admin.site.register(Worker, WorkerAdmin)
admin.site.register(Task, TaskAdmin)
|
ebc06c75186d7e856fb11b01d321edc156b4ad24
|
tests/conftest.py
|
tests/conftest.py
|
import pytest
import sys
import psycopg2
import os
from testing import postgresql as pgsql_test
postgresql = None
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(items):
# will execute as early as possible
items.sort(key=lambda item: item.parent.obj.ORDER if hasattr(item.parent.obj, 'ORDER') else 0)
def pytest_configure(config):
global postgresql
def handler(postgresql):
f = open(os.path.dirname(os.path.realpath(__file__)) + "/../sql/init.sql")
sql_query = f.read()
f.close()
conn = psycopg2.connect(**postgresql.dsn())
cursor = conn.cursor()
cursor.execute(sql_query)
cursor.close()
conn.commit()
conn.close()
# force default timezone to pass tests on os with different local timezone setting
pgsql_test.Postgresql.DEFAULT_SETTINGS['postgres_args'] += ' -c timezone=+5'
PGSQLFactory = pgsql_test.PostgresqlFactory(
cache_initialized_db=True,
on_initialized=handler
)
postgresql = PGSQLFactory()
os.environ["DATABASE_URL"] = postgresql.url()
from qllr.conf import settings
settings['use_avg_perf_tdm'] = True
sys.path.append(sys.path[0] + "/..")
def pytest_unconfigure(config):
global postgresql
postgresql.stop()
|
import sys
import psycopg2
import os
from testing import postgresql as pgsql_test
postgresql = None
def pytest_configure(config):
# TODO: this is dirty hack to be able to run pytest
if config:
return
global postgresql
def handler(postgresql):
f = open(os.path.dirname(os.path.realpath(__file__)) + "/../sql/init.sql")
sql_query = f.read()
f.close()
conn = psycopg2.connect(**postgresql.dsn())
cursor = conn.cursor()
cursor.execute(sql_query)
cursor.close()
conn.commit()
conn.close()
# force default timezone to pass tests on os with different local timezone setting
pgsql_test.Postgresql.DEFAULT_SETTINGS['postgres_args'] += ' -c timezone=+5'
PGSQLFactory = pgsql_test.PostgresqlFactory(
cache_initialized_db=True,
on_initialized=handler
)
postgresql = PGSQLFactory()
os.environ["DATABASE_URL"] = postgresql.url()
from qllr.conf import settings
settings['use_avg_perf_tdm'] = True
sys.path.append(sys.path[0] + "/..")
def pytest_unconfigure(config):
global postgresql
postgresql.stop()
|
Add hack to be able to run pytest
|
Add hack to be able to run pytest
|
Python
|
agpl-3.0
|
em92/quakelive-local-ratings,em92/quakelive-local-ratings,em92/quakelive-local-ratings,em92/pickup-rating,em92/quakelive-local-ratings,em92/pickup-rating,em92/pickup-rating,em92/quakelive-local-ratings
|
import pytest
import sys
import psycopg2
import os
from testing import postgresql as pgsql_test
postgresql = None
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(items):
# will execute as early as possible
items.sort(key=lambda item: item.parent.obj.ORDER if hasattr(item.parent.obj, 'ORDER') else 0)
def pytest_configure(config):
global postgresql
def handler(postgresql):
f = open(os.path.dirname(os.path.realpath(__file__)) + "/../sql/init.sql")
sql_query = f.read()
f.close()
conn = psycopg2.connect(**postgresql.dsn())
cursor = conn.cursor()
cursor.execute(sql_query)
cursor.close()
conn.commit()
conn.close()
# force default timezone to pass tests on os with different local timezone setting
pgsql_test.Postgresql.DEFAULT_SETTINGS['postgres_args'] += ' -c timezone=+5'
PGSQLFactory = pgsql_test.PostgresqlFactory(
cache_initialized_db=True,
on_initialized=handler
)
postgresql = PGSQLFactory()
os.environ["DATABASE_URL"] = postgresql.url()
from qllr.conf import settings
settings['use_avg_perf_tdm'] = True
sys.path.append(sys.path[0] + "/..")
def pytest_unconfigure(config):
global postgresql
postgresql.stop()
Add hack to be able to run pytest
|
import sys
import psycopg2
import os
from testing import postgresql as pgsql_test
postgresql = None
def pytest_configure(config):
# TODO: this is dirty hack to be able to run pytest
if config:
return
global postgresql
def handler(postgresql):
f = open(os.path.dirname(os.path.realpath(__file__)) + "/../sql/init.sql")
sql_query = f.read()
f.close()
conn = psycopg2.connect(**postgresql.dsn())
cursor = conn.cursor()
cursor.execute(sql_query)
cursor.close()
conn.commit()
conn.close()
# force default timezone to pass tests on os with different local timezone setting
pgsql_test.Postgresql.DEFAULT_SETTINGS['postgres_args'] += ' -c timezone=+5'
PGSQLFactory = pgsql_test.PostgresqlFactory(
cache_initialized_db=True,
on_initialized=handler
)
postgresql = PGSQLFactory()
os.environ["DATABASE_URL"] = postgresql.url()
from qllr.conf import settings
settings['use_avg_perf_tdm'] = True
sys.path.append(sys.path[0] + "/..")
def pytest_unconfigure(config):
global postgresql
postgresql.stop()
|
<commit_before>import pytest
import sys
import psycopg2
import os
from testing import postgresql as pgsql_test
postgresql = None
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(items):
# will execute as early as possible
items.sort(key=lambda item: item.parent.obj.ORDER if hasattr(item.parent.obj, 'ORDER') else 0)
def pytest_configure(config):
global postgresql
def handler(postgresql):
f = open(os.path.dirname(os.path.realpath(__file__)) + "/../sql/init.sql")
sql_query = f.read()
f.close()
conn = psycopg2.connect(**postgresql.dsn())
cursor = conn.cursor()
cursor.execute(sql_query)
cursor.close()
conn.commit()
conn.close()
# force default timezone to pass tests on os with different local timezone setting
pgsql_test.Postgresql.DEFAULT_SETTINGS['postgres_args'] += ' -c timezone=+5'
PGSQLFactory = pgsql_test.PostgresqlFactory(
cache_initialized_db=True,
on_initialized=handler
)
postgresql = PGSQLFactory()
os.environ["DATABASE_URL"] = postgresql.url()
from qllr.conf import settings
settings['use_avg_perf_tdm'] = True
sys.path.append(sys.path[0] + "/..")
def pytest_unconfigure(config):
global postgresql
postgresql.stop()
<commit_msg>Add hack to be able to run pytest<commit_after>
|
import sys
import psycopg2
import os
from testing import postgresql as pgsql_test
postgresql = None
def pytest_configure(config):
# TODO: this is dirty hack to be able to run pytest
if config:
return
global postgresql
def handler(postgresql):
f = open(os.path.dirname(os.path.realpath(__file__)) + "/../sql/init.sql")
sql_query = f.read()
f.close()
conn = psycopg2.connect(**postgresql.dsn())
cursor = conn.cursor()
cursor.execute(sql_query)
cursor.close()
conn.commit()
conn.close()
# force default timezone to pass tests on os with different local timezone setting
pgsql_test.Postgresql.DEFAULT_SETTINGS['postgres_args'] += ' -c timezone=+5'
PGSQLFactory = pgsql_test.PostgresqlFactory(
cache_initialized_db=True,
on_initialized=handler
)
postgresql = PGSQLFactory()
os.environ["DATABASE_URL"] = postgresql.url()
from qllr.conf import settings
settings['use_avg_perf_tdm'] = True
sys.path.append(sys.path[0] + "/..")
def pytest_unconfigure(config):
global postgresql
postgresql.stop()
|
import pytest
import sys
import psycopg2
import os
from testing import postgresql as pgsql_test
postgresql = None
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(items):
# will execute as early as possible
items.sort(key=lambda item: item.parent.obj.ORDER if hasattr(item.parent.obj, 'ORDER') else 0)
def pytest_configure(config):
global postgresql
def handler(postgresql):
f = open(os.path.dirname(os.path.realpath(__file__)) + "/../sql/init.sql")
sql_query = f.read()
f.close()
conn = psycopg2.connect(**postgresql.dsn())
cursor = conn.cursor()
cursor.execute(sql_query)
cursor.close()
conn.commit()
conn.close()
# force default timezone to pass tests on os with different local timezone setting
pgsql_test.Postgresql.DEFAULT_SETTINGS['postgres_args'] += ' -c timezone=+5'
PGSQLFactory = pgsql_test.PostgresqlFactory(
cache_initialized_db=True,
on_initialized=handler
)
postgresql = PGSQLFactory()
os.environ["DATABASE_URL"] = postgresql.url()
from qllr.conf import settings
settings['use_avg_perf_tdm'] = True
sys.path.append(sys.path[0] + "/..")
def pytest_unconfigure(config):
global postgresql
postgresql.stop()
Add hack to be able to run pytestimport sys
import psycopg2
import os
from testing import postgresql as pgsql_test
postgresql = None
def pytest_configure(config):
# TODO: this is dirty hack to be able to run pytest
if config:
return
global postgresql
def handler(postgresql):
f = open(os.path.dirname(os.path.realpath(__file__)) + "/../sql/init.sql")
sql_query = f.read()
f.close()
conn = psycopg2.connect(**postgresql.dsn())
cursor = conn.cursor()
cursor.execute(sql_query)
cursor.close()
conn.commit()
conn.close()
# force default timezone to pass tests on os with different local timezone setting
pgsql_test.Postgresql.DEFAULT_SETTINGS['postgres_args'] += ' -c timezone=+5'
PGSQLFactory = pgsql_test.PostgresqlFactory(
cache_initialized_db=True,
on_initialized=handler
)
postgresql = PGSQLFactory()
os.environ["DATABASE_URL"] = postgresql.url()
from qllr.conf import settings
settings['use_avg_perf_tdm'] = True
sys.path.append(sys.path[0] + "/..")
def pytest_unconfigure(config):
global postgresql
postgresql.stop()
|
<commit_before>import pytest
import sys
import psycopg2
import os
from testing import postgresql as pgsql_test
postgresql = None
@pytest.hookimpl(tryfirst=True)
def pytest_collection_modifyitems(items):
# will execute as early as possible
items.sort(key=lambda item: item.parent.obj.ORDER if hasattr(item.parent.obj, 'ORDER') else 0)
def pytest_configure(config):
global postgresql
def handler(postgresql):
f = open(os.path.dirname(os.path.realpath(__file__)) + "/../sql/init.sql")
sql_query = f.read()
f.close()
conn = psycopg2.connect(**postgresql.dsn())
cursor = conn.cursor()
cursor.execute(sql_query)
cursor.close()
conn.commit()
conn.close()
# force default timezone to pass tests on os with different local timezone setting
pgsql_test.Postgresql.DEFAULT_SETTINGS['postgres_args'] += ' -c timezone=+5'
PGSQLFactory = pgsql_test.PostgresqlFactory(
cache_initialized_db=True,
on_initialized=handler
)
postgresql = PGSQLFactory()
os.environ["DATABASE_URL"] = postgresql.url()
from qllr.conf import settings
settings['use_avg_perf_tdm'] = True
sys.path.append(sys.path[0] + "/..")
def pytest_unconfigure(config):
global postgresql
postgresql.stop()
<commit_msg>Add hack to be able to run pytest<commit_after>import sys
import psycopg2
import os
from testing import postgresql as pgsql_test
postgresql = None
def pytest_configure(config):
# TODO: this is dirty hack to be able to run pytest
if config:
return
global postgresql
def handler(postgresql):
f = open(os.path.dirname(os.path.realpath(__file__)) + "/../sql/init.sql")
sql_query = f.read()
f.close()
conn = psycopg2.connect(**postgresql.dsn())
cursor = conn.cursor()
cursor.execute(sql_query)
cursor.close()
conn.commit()
conn.close()
# force default timezone to pass tests on os with different local timezone setting
pgsql_test.Postgresql.DEFAULT_SETTINGS['postgres_args'] += ' -c timezone=+5'
PGSQLFactory = pgsql_test.PostgresqlFactory(
cache_initialized_db=True,
on_initialized=handler
)
postgresql = PGSQLFactory()
os.environ["DATABASE_URL"] = postgresql.url()
from qllr.conf import settings
settings['use_avg_perf_tdm'] = True
sys.path.append(sys.path[0] + "/..")
def pytest_unconfigure(config):
global postgresql
postgresql.stop()
|
3189b35f4fea14962cc5dcff7385d7e02bba4e01
|
plata/product/feincms/models.py
|
plata/product/feincms/models.py
|
from django.utils.translation import get_language, ugettext_lazy as _
from feincms.models import Base
from plata.product.models import Product, ProductManager
class CMSProduct(Product, Base):
"""
FeinCMS-based product model
"""
class Meta:
app_label = 'product'
verbose_name = _('product')
verbose_name_plural = _('products')
objects = ProductManager()
|
from django.utils.translation import get_language, ugettext_lazy as _
from feincms.models import Base
from plata.product.models import Product, ProductManager
class CMSProduct(Product, Base):
"""
FeinCMS-based product model
The admin integration requires FeinCMS >=1.2 to work correctly.
"""
class Meta:
app_label = 'product'
verbose_name = _('product')
verbose_name_plural = _('products')
objects = ProductManager()
|
Add note about FeinCMS version requirements
|
Add note about FeinCMS version requirements
|
Python
|
bsd-3-clause
|
armicron/plata,armicron/plata,stefanklug/plata,armicron/plata,allink/plata
|
from django.utils.translation import get_language, ugettext_lazy as _
from feincms.models import Base
from plata.product.models import Product, ProductManager
class CMSProduct(Product, Base):
"""
FeinCMS-based product model
"""
class Meta:
app_label = 'product'
verbose_name = _('product')
verbose_name_plural = _('products')
objects = ProductManager()
Add note about FeinCMS version requirements
|
from django.utils.translation import get_language, ugettext_lazy as _
from feincms.models import Base
from plata.product.models import Product, ProductManager
class CMSProduct(Product, Base):
"""
FeinCMS-based product model
The admin integration requires FeinCMS >=1.2 to work correctly.
"""
class Meta:
app_label = 'product'
verbose_name = _('product')
verbose_name_plural = _('products')
objects = ProductManager()
|
<commit_before>from django.utils.translation import get_language, ugettext_lazy as _
from feincms.models import Base
from plata.product.models import Product, ProductManager
class CMSProduct(Product, Base):
"""
FeinCMS-based product model
"""
class Meta:
app_label = 'product'
verbose_name = _('product')
verbose_name_plural = _('products')
objects = ProductManager()
<commit_msg>Add note about FeinCMS version requirements<commit_after>
|
from django.utils.translation import get_language, ugettext_lazy as _
from feincms.models import Base
from plata.product.models import Product, ProductManager
class CMSProduct(Product, Base):
"""
FeinCMS-based product model
The admin integration requires FeinCMS >=1.2 to work correctly.
"""
class Meta:
app_label = 'product'
verbose_name = _('product')
verbose_name_plural = _('products')
objects = ProductManager()
|
from django.utils.translation import get_language, ugettext_lazy as _
from feincms.models import Base
from plata.product.models import Product, ProductManager
class CMSProduct(Product, Base):
"""
FeinCMS-based product model
"""
class Meta:
app_label = 'product'
verbose_name = _('product')
verbose_name_plural = _('products')
objects = ProductManager()
Add note about FeinCMS version requirementsfrom django.utils.translation import get_language, ugettext_lazy as _
from feincms.models import Base
from plata.product.models import Product, ProductManager
class CMSProduct(Product, Base):
"""
FeinCMS-based product model
The admin integration requires FeinCMS >=1.2 to work correctly.
"""
class Meta:
app_label = 'product'
verbose_name = _('product')
verbose_name_plural = _('products')
objects = ProductManager()
|
<commit_before>from django.utils.translation import get_language, ugettext_lazy as _
from feincms.models import Base
from plata.product.models import Product, ProductManager
class CMSProduct(Product, Base):
"""
FeinCMS-based product model
"""
class Meta:
app_label = 'product'
verbose_name = _('product')
verbose_name_plural = _('products')
objects = ProductManager()
<commit_msg>Add note about FeinCMS version requirements<commit_after>from django.utils.translation import get_language, ugettext_lazy as _
from feincms.models import Base
from plata.product.models import Product, ProductManager
class CMSProduct(Product, Base):
"""
FeinCMS-based product model
The admin integration requires FeinCMS >=1.2 to work correctly.
"""
class Meta:
app_label = 'product'
verbose_name = _('product')
verbose_name_plural = _('products')
objects = ProductManager()
|
8528f21397672b5719fcf4edecd8efa3a1eec60a
|
cellardoor/serializers/json_serializer.py
|
cellardoor/serializers/json_serializer.py
|
import re
import json
from datetime import datetime
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
try:
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
import re
import json
from datetime import datetime
import collections
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, collections.Iterable):
return list(obj)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
Use more reliable method of detecting iterables
|
Use more reliable method of detecting iterables
|
Python
|
mit
|
cooper-software/cellardoor
|
import re
import json
from datetime import datetime
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
try:
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)Use more reliable method of detecting iterables
|
import re
import json
from datetime import datetime
import collections
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, collections.Iterable):
return list(obj)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
<commit_before>import re
import json
from datetime import datetime
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
try:
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)<commit_msg>Use more reliable method of detecting iterables<commit_after>
|
import re
import json
from datetime import datetime
import collections
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, collections.Iterable):
return list(obj)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
import re
import json
from datetime import datetime
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
try:
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)Use more reliable method of detecting iterablesimport re
import json
from datetime import datetime
import collections
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, collections.Iterable):
return list(obj)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
<commit_before>import re
import json
from datetime import datetime
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
try:
iterable = iter(obj)
except TypeError:
pass
else:
return list(iterable)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)<commit_msg>Use more reliable method of detecting iterables<commit_after>import re
import json
from datetime import datetime
import collections
from . import Serializer
class CellarDoorJSONEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, collections.Iterable):
return list(obj)
if isinstance(obj, datetime):
return obj.isoformat()
return super(CellarDoorJSONEncoder, self).default(obj)
def as_date(obj):
if '_date' in obj:
return datetime(*map(int, re.split('[^\d]', obj['_date'])[:-1]))
else:
return obj
class JSONSerializer(Serializer):
mimetype = 'application/json'
def serialize(self, obj):
return json.dumps(obj, cls=CellarDoorJSONEncoder)
def unserialize(self, stream):
return json.load(stream, object_hook=as_date)
def unserialize_string(self, data):
return json.loads(data, object_hook=as_date)
|
daeabec6f055ca232903f50f307b5ab8a518b1aa
|
apps/domain/src/main/core/manager/environment_manager.py
|
apps/domain/src/main/core/manager/environment_manager.py
|
from typing import List
from typing import Union
from .database_manager import DatabaseManager
from ..database.environment.environment import Environment
from ..database.environment.user_environment import UserEnvironment
from ..exceptions import (
EnvironmentNotFoundError,
)
class EnvironmentManager(DatabaseManager):
schema = Environment
user_env_association_schema = UserEnvironment
def __init__(self, database):
self._schema = EnvironmentManager.schema
self._association_schema = EnvironmentManager.user_env_association_schema
self.db = database
def association(self, user_id: str, env_id: str):
new_association_obj = self._association_schema(user=user_id, environment=env_id)
self.db.session.add(new_association_obj)
self.db.session.commit()
def get_environments(self, **kwargs):
objects = (
self.db.session.query(self._association_schema).filter_by(**kwargs).all()
)
return objects
def first(self, **kwargs) -> Union[None, List]:
result = super().first(**kwargs)
if not result:
raise EnvironmentNotFoundError
return result
def query(self, **kwargs) -> Union[None, List]:
results = super().query(**kwargs)
if len(results) == 0:
raise EnvironmentNotFoundError
return results
|
from typing import List
from typing import Union
from .database_manager import DatabaseManager
from ..database.environment.environment import Environment
from ..database.environment.user_environment import UserEnvironment
from ..exceptions import (
EnvironmentNotFoundError,
)
class EnvironmentManager(DatabaseManager):
schema = Environment
user_env_association_schema = UserEnvironment
def __init__(self, database):
self._schema = EnvironmentManager.schema
self._association_schema = EnvironmentManager.user_env_association_schema
self.db = database
def association(self, user_id: str, env_id: str):
new_association_obj = self._association_schema(user=user_id, environment=env_id)
self.db.session.add(new_association_obj)
self.db.session.commit()
def get_environments(self, **kwargs):
objects = (
self.db.session.query(self._association_schema).filter_by(**kwargs).all()
)
return objects
def delete_associations(self, environment_id):
# Delete User environment Association
associations = (
self.db.session.query(self._association_schema)
.filter_by(environment=environment_id)
.all()
)
for association in associations:
self.db.session.delete(association)
self.db.session.commit()
def first(self, **kwargs) -> Union[None, List]:
result = super().first(**kwargs)
if not result:
raise EnvironmentNotFoundError
return result
def query(self, **kwargs) -> Union[None, List]:
results = super().query(**kwargs)
if len(results) == 0:
raise EnvironmentNotFoundError
return results
|
ADD delete_associations method at EnvironmentManager
|
ADD delete_associations method at EnvironmentManager
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
from typing import List
from typing import Union
from .database_manager import DatabaseManager
from ..database.environment.environment import Environment
from ..database.environment.user_environment import UserEnvironment
from ..exceptions import (
EnvironmentNotFoundError,
)
class EnvironmentManager(DatabaseManager):
schema = Environment
user_env_association_schema = UserEnvironment
def __init__(self, database):
self._schema = EnvironmentManager.schema
self._association_schema = EnvironmentManager.user_env_association_schema
self.db = database
def association(self, user_id: str, env_id: str):
new_association_obj = self._association_schema(user=user_id, environment=env_id)
self.db.session.add(new_association_obj)
self.db.session.commit()
def get_environments(self, **kwargs):
objects = (
self.db.session.query(self._association_schema).filter_by(**kwargs).all()
)
return objects
def first(self, **kwargs) -> Union[None, List]:
result = super().first(**kwargs)
if not result:
raise EnvironmentNotFoundError
return result
def query(self, **kwargs) -> Union[None, List]:
results = super().query(**kwargs)
if len(results) == 0:
raise EnvironmentNotFoundError
return results
ADD delete_associations method at EnvironmentManager
|
from typing import List
from typing import Union
from .database_manager import DatabaseManager
from ..database.environment.environment import Environment
from ..database.environment.user_environment import UserEnvironment
from ..exceptions import (
EnvironmentNotFoundError,
)
class EnvironmentManager(DatabaseManager):
schema = Environment
user_env_association_schema = UserEnvironment
def __init__(self, database):
self._schema = EnvironmentManager.schema
self._association_schema = EnvironmentManager.user_env_association_schema
self.db = database
def association(self, user_id: str, env_id: str):
new_association_obj = self._association_schema(user=user_id, environment=env_id)
self.db.session.add(new_association_obj)
self.db.session.commit()
def get_environments(self, **kwargs):
objects = (
self.db.session.query(self._association_schema).filter_by(**kwargs).all()
)
return objects
def delete_associations(self, environment_id):
# Delete User environment Association
associations = (
self.db.session.query(self._association_schema)
.filter_by(environment=environment_id)
.all()
)
for association in associations:
self.db.session.delete(association)
self.db.session.commit()
def first(self, **kwargs) -> Union[None, List]:
result = super().first(**kwargs)
if not result:
raise EnvironmentNotFoundError
return result
def query(self, **kwargs) -> Union[None, List]:
results = super().query(**kwargs)
if len(results) == 0:
raise EnvironmentNotFoundError
return results
|
<commit_before>from typing import List
from typing import Union
from .database_manager import DatabaseManager
from ..database.environment.environment import Environment
from ..database.environment.user_environment import UserEnvironment
from ..exceptions import (
EnvironmentNotFoundError,
)
class EnvironmentManager(DatabaseManager):
schema = Environment
user_env_association_schema = UserEnvironment
def __init__(self, database):
self._schema = EnvironmentManager.schema
self._association_schema = EnvironmentManager.user_env_association_schema
self.db = database
def association(self, user_id: str, env_id: str):
new_association_obj = self._association_schema(user=user_id, environment=env_id)
self.db.session.add(new_association_obj)
self.db.session.commit()
def get_environments(self, **kwargs):
objects = (
self.db.session.query(self._association_schema).filter_by(**kwargs).all()
)
return objects
def first(self, **kwargs) -> Union[None, List]:
result = super().first(**kwargs)
if not result:
raise EnvironmentNotFoundError
return result
def query(self, **kwargs) -> Union[None, List]:
results = super().query(**kwargs)
if len(results) == 0:
raise EnvironmentNotFoundError
return results
<commit_msg>ADD delete_associations method at EnvironmentManager<commit_after>
|
from typing import List
from typing import Union
from .database_manager import DatabaseManager
from ..database.environment.environment import Environment
from ..database.environment.user_environment import UserEnvironment
from ..exceptions import (
EnvironmentNotFoundError,
)
class EnvironmentManager(DatabaseManager):
schema = Environment
user_env_association_schema = UserEnvironment
def __init__(self, database):
self._schema = EnvironmentManager.schema
self._association_schema = EnvironmentManager.user_env_association_schema
self.db = database
def association(self, user_id: str, env_id: str):
new_association_obj = self._association_schema(user=user_id, environment=env_id)
self.db.session.add(new_association_obj)
self.db.session.commit()
def get_environments(self, **kwargs):
objects = (
self.db.session.query(self._association_schema).filter_by(**kwargs).all()
)
return objects
def delete_associations(self, environment_id):
# Delete User environment Association
associations = (
self.db.session.query(self._association_schema)
.filter_by(environment=environment_id)
.all()
)
for association in associations:
self.db.session.delete(association)
self.db.session.commit()
def first(self, **kwargs) -> Union[None, List]:
result = super().first(**kwargs)
if not result:
raise EnvironmentNotFoundError
return result
def query(self, **kwargs) -> Union[None, List]:
results = super().query(**kwargs)
if len(results) == 0:
raise EnvironmentNotFoundError
return results
|
from typing import List
from typing import Union
from .database_manager import DatabaseManager
from ..database.environment.environment import Environment
from ..database.environment.user_environment import UserEnvironment
from ..exceptions import (
EnvironmentNotFoundError,
)
class EnvironmentManager(DatabaseManager):
schema = Environment
user_env_association_schema = UserEnvironment
def __init__(self, database):
self._schema = EnvironmentManager.schema
self._association_schema = EnvironmentManager.user_env_association_schema
self.db = database
def association(self, user_id: str, env_id: str):
new_association_obj = self._association_schema(user=user_id, environment=env_id)
self.db.session.add(new_association_obj)
self.db.session.commit()
def get_environments(self, **kwargs):
objects = (
self.db.session.query(self._association_schema).filter_by(**kwargs).all()
)
return objects
def first(self, **kwargs) -> Union[None, List]:
result = super().first(**kwargs)
if not result:
raise EnvironmentNotFoundError
return result
def query(self, **kwargs) -> Union[None, List]:
results = super().query(**kwargs)
if len(results) == 0:
raise EnvironmentNotFoundError
return results
ADD delete_associations method at EnvironmentManagerfrom typing import List
from typing import Union
from .database_manager import DatabaseManager
from ..database.environment.environment import Environment
from ..database.environment.user_environment import UserEnvironment
from ..exceptions import (
EnvironmentNotFoundError,
)
class EnvironmentManager(DatabaseManager):
schema = Environment
user_env_association_schema = UserEnvironment
def __init__(self, database):
self._schema = EnvironmentManager.schema
self._association_schema = EnvironmentManager.user_env_association_schema
self.db = database
def association(self, user_id: str, env_id: str):
new_association_obj = self._association_schema(user=user_id, environment=env_id)
self.db.session.add(new_association_obj)
self.db.session.commit()
def get_environments(self, **kwargs):
objects = (
self.db.session.query(self._association_schema).filter_by(**kwargs).all()
)
return objects
def delete_associations(self, environment_id):
# Delete User environment Association
associations = (
self.db.session.query(self._association_schema)
.filter_by(environment=environment_id)
.all()
)
for association in associations:
self.db.session.delete(association)
self.db.session.commit()
def first(self, **kwargs) -> Union[None, List]:
result = super().first(**kwargs)
if not result:
raise EnvironmentNotFoundError
return result
def query(self, **kwargs) -> Union[None, List]:
results = super().query(**kwargs)
if len(results) == 0:
raise EnvironmentNotFoundError
return results
|
<commit_before>from typing import List
from typing import Union
from .database_manager import DatabaseManager
from ..database.environment.environment import Environment
from ..database.environment.user_environment import UserEnvironment
from ..exceptions import (
EnvironmentNotFoundError,
)
class EnvironmentManager(DatabaseManager):
schema = Environment
user_env_association_schema = UserEnvironment
def __init__(self, database):
self._schema = EnvironmentManager.schema
self._association_schema = EnvironmentManager.user_env_association_schema
self.db = database
def association(self, user_id: str, env_id: str):
new_association_obj = self._association_schema(user=user_id, environment=env_id)
self.db.session.add(new_association_obj)
self.db.session.commit()
def get_environments(self, **kwargs):
objects = (
self.db.session.query(self._association_schema).filter_by(**kwargs).all()
)
return objects
def first(self, **kwargs) -> Union[None, List]:
result = super().first(**kwargs)
if not result:
raise EnvironmentNotFoundError
return result
def query(self, **kwargs) -> Union[None, List]:
results = super().query(**kwargs)
if len(results) == 0:
raise EnvironmentNotFoundError
return results
<commit_msg>ADD delete_associations method at EnvironmentManager<commit_after>from typing import List
from typing import Union
from .database_manager import DatabaseManager
from ..database.environment.environment import Environment
from ..database.environment.user_environment import UserEnvironment
from ..exceptions import (
EnvironmentNotFoundError,
)
class EnvironmentManager(DatabaseManager):
schema = Environment
user_env_association_schema = UserEnvironment
def __init__(self, database):
self._schema = EnvironmentManager.schema
self._association_schema = EnvironmentManager.user_env_association_schema
self.db = database
def association(self, user_id: str, env_id: str):
new_association_obj = self._association_schema(user=user_id, environment=env_id)
self.db.session.add(new_association_obj)
self.db.session.commit()
def get_environments(self, **kwargs):
objects = (
self.db.session.query(self._association_schema).filter_by(**kwargs).all()
)
return objects
def delete_associations(self, environment_id):
# Delete User environment Association
associations = (
self.db.session.query(self._association_schema)
.filter_by(environment=environment_id)
.all()
)
for association in associations:
self.db.session.delete(association)
self.db.session.commit()
def first(self, **kwargs) -> Union[None, List]:
result = super().first(**kwargs)
if not result:
raise EnvironmentNotFoundError
return result
def query(self, **kwargs) -> Union[None, List]:
results = super().query(**kwargs)
if len(results) == 0:
raise EnvironmentNotFoundError
return results
|
600ec67b175ca78c4dd72b4468368920ce390316
|
flask_controllers/GameModes.py
|
flask_controllers/GameModes.py
|
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
|
from flask import request
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
textonly = request.args.get('textmode', None)
if textonly:
return build_response(
html_status=200,
response_data=GameObject.game_modes,
response_mimetype="application/json"
)
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = GameObject.game_modes
# game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
|
Add text only mode to get game modes
|
Add text only mode to get game modes
|
Python
|
apache-2.0
|
dsandersAzure/python_cowbull_server,dsandersAzure/python_cowbull_server
|
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
Add text only mode to get game modes
|
from flask import request
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
textonly = request.args.get('textmode', None)
if textonly:
return build_response(
html_status=200,
response_data=GameObject.game_modes,
response_mimetype="application/json"
)
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = GameObject.game_modes
# game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
|
<commit_before>from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
<commit_msg>Add text only mode to get game modes<commit_after>
|
from flask import request
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
textonly = request.args.get('textmode', None)
if textonly:
return build_response(
html_status=200,
response_data=GameObject.game_modes,
response_mimetype="application/json"
)
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = GameObject.game_modes
# game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
|
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
Add text only mode to get game modesfrom flask import request
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
textonly = request.args.get('textmode', None)
if textonly:
return build_response(
html_status=200,
response_data=GameObject.game_modes,
response_mimetype="application/json"
)
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = GameObject.game_modes
# game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
|
<commit_before>from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
<commit_msg>Add text only mode to get game modes<commit_after>from flask import request
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
textonly = request.args.get('textmode', None)
if textonly:
return build_response(
html_status=200,
response_data=GameObject.game_modes,
response_mimetype="application/json"
)
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = GameObject.game_modes
# game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
|
8d409ab4ca35b38d97d17f0f443c8cdb62d5e58e
|
tests/tests/mendertesting.py
|
tests/tests/mendertesting.py
|
import pytest
class MenderTesting(object):
slow = pytest.mark.skipif(not pytest.config.getoption("--runslow"), reason="need --runslow option to run")
fast = pytest.mark.skipif(not pytest.config.getoption("--runfast"), reason="need --runfast option to run")
nightly = pytest.mark.skipif(not pytest.config.getoption("--runnightly"), reason="need --runnightly option to run")
|
import pytest
class MenderTesting(object):
slow_cond = False
fast_cond = False
nightly_cond = False
slow = None
fast = None
nightly = None
if pytest.config.getoption("--runslow"):
MenderTesting.slow_cond = True
else:
MenderTesting.slow_cond = False
if pytest.config.getoption("--runfast"):
MenderTesting.fast_cond = True
else:
MenderTesting.fast_cond = False
if pytest.config.getoption("--runnightly"):
MenderTesting.nightly_cond = True
else:
MenderTesting.nightly_cond = False
if not MenderTesting.slow_cond and not MenderTesting.fast_cond and not MenderTesting.nightly_cond:
# Default to running everything but nightly.
MenderTesting.slow_cond = True
MenderTesting.fast_cond = True
MenderTesting.slow = pytest.mark.skipif(not MenderTesting.slow_cond, reason="need --runslow option to run")
MenderTesting.fast = pytest.mark.skipif(not MenderTesting.fast_cond, reason="need --runfast option to run")
MenderTesting.nightly = pytest.mark.skipif(not MenderTesting.nightly_cond, reason="need --runnightly option to run")
|
Fix no tests running when not passing any options.
|
Fix no tests running when not passing any options.
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io>
|
Python
|
apache-2.0
|
pasinskim/integration,GregorioDiStefano/integration,pasinskim/integration,GregorioDiStefano/integration,pasinskim/integration
|
import pytest
class MenderTesting(object):
slow = pytest.mark.skipif(not pytest.config.getoption("--runslow"), reason="need --runslow option to run")
fast = pytest.mark.skipif(not pytest.config.getoption("--runfast"), reason="need --runfast option to run")
nightly = pytest.mark.skipif(not pytest.config.getoption("--runnightly"), reason="need --runnightly option to run")
Fix no tests running when not passing any options.
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io>
|
import pytest
class MenderTesting(object):
slow_cond = False
fast_cond = False
nightly_cond = False
slow = None
fast = None
nightly = None
if pytest.config.getoption("--runslow"):
MenderTesting.slow_cond = True
else:
MenderTesting.slow_cond = False
if pytest.config.getoption("--runfast"):
MenderTesting.fast_cond = True
else:
MenderTesting.fast_cond = False
if pytest.config.getoption("--runnightly"):
MenderTesting.nightly_cond = True
else:
MenderTesting.nightly_cond = False
if not MenderTesting.slow_cond and not MenderTesting.fast_cond and not MenderTesting.nightly_cond:
# Default to running everything but nightly.
MenderTesting.slow_cond = True
MenderTesting.fast_cond = True
MenderTesting.slow = pytest.mark.skipif(not MenderTesting.slow_cond, reason="need --runslow option to run")
MenderTesting.fast = pytest.mark.skipif(not MenderTesting.fast_cond, reason="need --runfast option to run")
MenderTesting.nightly = pytest.mark.skipif(not MenderTesting.nightly_cond, reason="need --runnightly option to run")
|
<commit_before>import pytest
class MenderTesting(object):
slow = pytest.mark.skipif(not pytest.config.getoption("--runslow"), reason="need --runslow option to run")
fast = pytest.mark.skipif(not pytest.config.getoption("--runfast"), reason="need --runfast option to run")
nightly = pytest.mark.skipif(not pytest.config.getoption("--runnightly"), reason="need --runnightly option to run")
<commit_msg>Fix no tests running when not passing any options.
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io><commit_after>
|
import pytest
class MenderTesting(object):
slow_cond = False
fast_cond = False
nightly_cond = False
slow = None
fast = None
nightly = None
if pytest.config.getoption("--runslow"):
MenderTesting.slow_cond = True
else:
MenderTesting.slow_cond = False
if pytest.config.getoption("--runfast"):
MenderTesting.fast_cond = True
else:
MenderTesting.fast_cond = False
if pytest.config.getoption("--runnightly"):
MenderTesting.nightly_cond = True
else:
MenderTesting.nightly_cond = False
if not MenderTesting.slow_cond and not MenderTesting.fast_cond and not MenderTesting.nightly_cond:
# Default to running everything but nightly.
MenderTesting.slow_cond = True
MenderTesting.fast_cond = True
MenderTesting.slow = pytest.mark.skipif(not MenderTesting.slow_cond, reason="need --runslow option to run")
MenderTesting.fast = pytest.mark.skipif(not MenderTesting.fast_cond, reason="need --runfast option to run")
MenderTesting.nightly = pytest.mark.skipif(not MenderTesting.nightly_cond, reason="need --runnightly option to run")
|
import pytest
class MenderTesting(object):
slow = pytest.mark.skipif(not pytest.config.getoption("--runslow"), reason="need --runslow option to run")
fast = pytest.mark.skipif(not pytest.config.getoption("--runfast"), reason="need --runfast option to run")
nightly = pytest.mark.skipif(not pytest.config.getoption("--runnightly"), reason="need --runnightly option to run")
Fix no tests running when not passing any options.
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io>import pytest
class MenderTesting(object):
slow_cond = False
fast_cond = False
nightly_cond = False
slow = None
fast = None
nightly = None
if pytest.config.getoption("--runslow"):
MenderTesting.slow_cond = True
else:
MenderTesting.slow_cond = False
if pytest.config.getoption("--runfast"):
MenderTesting.fast_cond = True
else:
MenderTesting.fast_cond = False
if pytest.config.getoption("--runnightly"):
MenderTesting.nightly_cond = True
else:
MenderTesting.nightly_cond = False
if not MenderTesting.slow_cond and not MenderTesting.fast_cond and not MenderTesting.nightly_cond:
# Default to running everything but nightly.
MenderTesting.slow_cond = True
MenderTesting.fast_cond = True
MenderTesting.slow = pytest.mark.skipif(not MenderTesting.slow_cond, reason="need --runslow option to run")
MenderTesting.fast = pytest.mark.skipif(not MenderTesting.fast_cond, reason="need --runfast option to run")
MenderTesting.nightly = pytest.mark.skipif(not MenderTesting.nightly_cond, reason="need --runnightly option to run")
|
<commit_before>import pytest
class MenderTesting(object):
slow = pytest.mark.skipif(not pytest.config.getoption("--runslow"), reason="need --runslow option to run")
fast = pytest.mark.skipif(not pytest.config.getoption("--runfast"), reason="need --runfast option to run")
nightly = pytest.mark.skipif(not pytest.config.getoption("--runnightly"), reason="need --runnightly option to run")
<commit_msg>Fix no tests running when not passing any options.
Signed-off-by: Kristian Amlie <505e66ae45028a0596c853559221f0b72c1cee21@mender.io><commit_after>import pytest
class MenderTesting(object):
slow_cond = False
fast_cond = False
nightly_cond = False
slow = None
fast = None
nightly = None
if pytest.config.getoption("--runslow"):
MenderTesting.slow_cond = True
else:
MenderTesting.slow_cond = False
if pytest.config.getoption("--runfast"):
MenderTesting.fast_cond = True
else:
MenderTesting.fast_cond = False
if pytest.config.getoption("--runnightly"):
MenderTesting.nightly_cond = True
else:
MenderTesting.nightly_cond = False
if not MenderTesting.slow_cond and not MenderTesting.fast_cond and not MenderTesting.nightly_cond:
# Default to running everything but nightly.
MenderTesting.slow_cond = True
MenderTesting.fast_cond = True
MenderTesting.slow = pytest.mark.skipif(not MenderTesting.slow_cond, reason="need --runslow option to run")
MenderTesting.fast = pytest.mark.skipif(not MenderTesting.fast_cond, reason="need --runfast option to run")
MenderTesting.nightly = pytest.mark.skipif(not MenderTesting.nightly_cond, reason="need --runnightly option to run")
|
92baaeca9065a769272bb9aafbcfef990620fe24
|
src/cm/utils/embed.py
|
src/cm/utils/embed.py
|
from django.core.urlresolvers import reverse
from django.conf import settings
def embed_html(text_key, attrs='', version_key=None, query_string="") :
if version_key :
url = reverse('text-view-comments-frame-version', args=[text_key, version_key])
else :
url = reverse('text-view-comments-frame', args=[text_key])
url += '?' + query_string
embed_code = '<iframe %s frameborder="0" src="%s%s" style="height: 200px; width: 99.9%%; position: relative; top: 0px;" />'%(attrs, settings.SITE_URL, url)
return embed_code
|
from django.core.urlresolvers import reverse
from django.conf import settings
def embed_html(text_key, attrs='', version_key=None, query_string="") :
if version_key :
url = reverse('text-view-comments-frame-version', args=[text_key, version_key])
else :
url = reverse('text-view-comments-frame', args=[text_key])
url += '?' + query_string
embed_code = '<iframe %s frameborder="0" src="%s%s" style="height: 200px; width: 99.9%%; position: relative; top: 0px;"></iframe>'%(attrs, settings.SITE_URL, url)
return embed_code
|
FIX : no autoclose iframe html element
|
FIX : no autoclose iframe html element
|
Python
|
agpl-3.0
|
co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt,co-ment/comt
|
from django.core.urlresolvers import reverse
from django.conf import settings
def embed_html(text_key, attrs='', version_key=None, query_string="") :
if version_key :
url = reverse('text-view-comments-frame-version', args=[text_key, version_key])
else :
url = reverse('text-view-comments-frame', args=[text_key])
url += '?' + query_string
embed_code = '<iframe %s frameborder="0" src="%s%s" style="height: 200px; width: 99.9%%; position: relative; top: 0px;" />'%(attrs, settings.SITE_URL, url)
return embed_code
FIX : no autoclose iframe html element
|
from django.core.urlresolvers import reverse
from django.conf import settings
def embed_html(text_key, attrs='', version_key=None, query_string="") :
if version_key :
url = reverse('text-view-comments-frame-version', args=[text_key, version_key])
else :
url = reverse('text-view-comments-frame', args=[text_key])
url += '?' + query_string
embed_code = '<iframe %s frameborder="0" src="%s%s" style="height: 200px; width: 99.9%%; position: relative; top: 0px;"></iframe>'%(attrs, settings.SITE_URL, url)
return embed_code
|
<commit_before>from django.core.urlresolvers import reverse
from django.conf import settings
def embed_html(text_key, attrs='', version_key=None, query_string="") :
if version_key :
url = reverse('text-view-comments-frame-version', args=[text_key, version_key])
else :
url = reverse('text-view-comments-frame', args=[text_key])
url += '?' + query_string
embed_code = '<iframe %s frameborder="0" src="%s%s" style="height: 200px; width: 99.9%%; position: relative; top: 0px;" />'%(attrs, settings.SITE_URL, url)
return embed_code
<commit_msg>FIX : no autoclose iframe html element<commit_after>
|
from django.core.urlresolvers import reverse
from django.conf import settings
def embed_html(text_key, attrs='', version_key=None, query_string="") :
if version_key :
url = reverse('text-view-comments-frame-version', args=[text_key, version_key])
else :
url = reverse('text-view-comments-frame', args=[text_key])
url += '?' + query_string
embed_code = '<iframe %s frameborder="0" src="%s%s" style="height: 200px; width: 99.9%%; position: relative; top: 0px;"></iframe>'%(attrs, settings.SITE_URL, url)
return embed_code
|
from django.core.urlresolvers import reverse
from django.conf import settings
def embed_html(text_key, attrs='', version_key=None, query_string="") :
if version_key :
url = reverse('text-view-comments-frame-version', args=[text_key, version_key])
else :
url = reverse('text-view-comments-frame', args=[text_key])
url += '?' + query_string
embed_code = '<iframe %s frameborder="0" src="%s%s" style="height: 200px; width: 99.9%%; position: relative; top: 0px;" />'%(attrs, settings.SITE_URL, url)
return embed_code
FIX : no autoclose iframe html elementfrom django.core.urlresolvers import reverse
from django.conf import settings
def embed_html(text_key, attrs='', version_key=None, query_string="") :
if version_key :
url = reverse('text-view-comments-frame-version', args=[text_key, version_key])
else :
url = reverse('text-view-comments-frame', args=[text_key])
url += '?' + query_string
embed_code = '<iframe %s frameborder="0" src="%s%s" style="height: 200px; width: 99.9%%; position: relative; top: 0px;"></iframe>'%(attrs, settings.SITE_URL, url)
return embed_code
|
<commit_before>from django.core.urlresolvers import reverse
from django.conf import settings
def embed_html(text_key, attrs='', version_key=None, query_string="") :
if version_key :
url = reverse('text-view-comments-frame-version', args=[text_key, version_key])
else :
url = reverse('text-view-comments-frame', args=[text_key])
url += '?' + query_string
embed_code = '<iframe %s frameborder="0" src="%s%s" style="height: 200px; width: 99.9%%; position: relative; top: 0px;" />'%(attrs, settings.SITE_URL, url)
return embed_code
<commit_msg>FIX : no autoclose iframe html element<commit_after>from django.core.urlresolvers import reverse
from django.conf import settings
def embed_html(text_key, attrs='', version_key=None, query_string="") :
if version_key :
url = reverse('text-view-comments-frame-version', args=[text_key, version_key])
else :
url = reverse('text-view-comments-frame', args=[text_key])
url += '?' + query_string
embed_code = '<iframe %s frameborder="0" src="%s%s" style="height: 200px; width: 99.9%%; position: relative; top: 0px;"></iframe>'%(attrs, settings.SITE_URL, url)
return embed_code
|
4585c5d0a69b190f55486a2cfb94a5c361bd4365
|
tests/pytests/functional/states/test_npm.py
|
tests/pytests/functional/states/test_npm.py
|
import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
except CommandExecutionError:
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
|
import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
# Just name the thing we're looking for
sminion.functions.npm # pylint: disable=pointless-statement
except (CommandExecutionError, AttributeError):
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
|
Check npm name as well
|
Check npm name as well
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
except CommandExecutionError:
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
Check npm name as well
|
import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
# Just name the thing we're looking for
sminion.functions.npm # pylint: disable=pointless-statement
except (CommandExecutionError, AttributeError):
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
|
<commit_before>import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
except CommandExecutionError:
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
<commit_msg>Check npm name as well<commit_after>
|
import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
# Just name the thing we're looking for
sminion.functions.npm # pylint: disable=pointless-statement
except (CommandExecutionError, AttributeError):
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
|
import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
except CommandExecutionError:
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
Check npm name as wellimport pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
# Just name the thing we're looking for
sminion.functions.npm # pylint: disable=pointless-statement
except (CommandExecutionError, AttributeError):
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
|
<commit_before>import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
except CommandExecutionError:
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
<commit_msg>Check npm name as well<commit_after>import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
# Just name the thing we're looking for
sminion.functions.npm # pylint: disable=pointless-statement
except (CommandExecutionError, AttributeError):
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
|
a16cffb7c3fe100e5e68a71e2dfcca26bf124464
|
prime-factors/prime_factors.py
|
prime-factors/prime_factors.py
|
# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime_factors(number, n=2, factors=None):
if factors is None:
factors = []
for num in range(n, number):
if (number % num) == 0:
factors.append(num)
return prime_factors(number // num, num, factors)
else:
return factors
|
# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime_factors(number):
num = 2
factors = []
while num <= number:
if (number % num) == 0:
number /= num
factors.append(num)
else:
num += 1
return factors
|
Change if condition to while with reformat
|
Change if condition to while with reformat
|
Python
|
mit
|
amalshehu/exercism-python
|
# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime_factors(number, n=2, factors=None):
if factors is None:
factors = []
for num in range(n, number):
if (number % num) == 0:
factors.append(num)
return prime_factors(number // num, num, factors)
else:
return factors
Change if condition to while with reformat
|
# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime_factors(number):
num = 2
factors = []
while num <= number:
if (number % num) == 0:
number /= num
factors.append(num)
else:
num += 1
return factors
|
<commit_before># File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime_factors(number, n=2, factors=None):
if factors is None:
factors = []
for num in range(n, number):
if (number % num) == 0:
factors.append(num)
return prime_factors(number // num, num, factors)
else:
return factors
<commit_msg>Change if condition to while with reformat<commit_after>
|
# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime_factors(number):
num = 2
factors = []
while num <= number:
if (number % num) == 0:
number /= num
factors.append(num)
else:
num += 1
return factors
|
# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime_factors(number, n=2, factors=None):
if factors is None:
factors = []
for num in range(n, number):
if (number % num) == 0:
factors.append(num)
return prime_factors(number // num, num, factors)
else:
return factors
Change if condition to while with reformat# File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime_factors(number):
num = 2
factors = []
while num <= number:
if (number % num) == 0:
number /= num
factors.append(num)
else:
num += 1
return factors
|
<commit_before># File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime_factors(number, n=2, factors=None):
if factors is None:
factors = []
for num in range(n, number):
if (number % num) == 0:
factors.append(num)
return prime_factors(number // num, num, factors)
else:
return factors
<commit_msg>Change if condition to while with reformat<commit_after># File: prime_factors.py
# Purpose: Compute the prime factors of a given natural number.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Monday 26 September 2016, 12:05 AM
def prime_factors(number):
num = 2
factors = []
while num <= number:
if (number % num) == 0:
number /= num
factors.append(num)
else:
num += 1
return factors
|
7619513d29c5f7ae886963ced70315d42dbd1a9b
|
ogbot/core/researcher.py
|
ogbot/core/researcher.py
|
from base import BaseBot
from scraping import research, general
class ResearcherBot(BaseBot):
def __init__(self, browser, config, planets):
self.research_client = research.Research(browser, config)
self.general_client = general.General(browser, config)
self.planets = planets
super(ResearcherBot, self).__init__(browser, config, planets)
def get_planet_for_research(self, planets=None):
if planets is None:
planets = self.planets
#for now the main planet will be used for research
return planets[0]
def get_next_research_item(self, planet):
available_research = self.research_client.get_available_research_for_planet(planet)
available_research_item = None
if available_research is not None:
available_research_item = available_research[0]
self.logger.info("Available Research:")
for item in available_research:
self.logger.info(" " + item.name)
# Favor ship upgrades
for item in available_research:
if item.id in [109, 110, 111]:
available_research_item = item
break
return available_research_item
def auto_research_next_item(self):
planet = self.get_planet_for_research(self.planets)
research = self.get_next_research_item(planet)
if research is not None:
self.research_client.research_item(research, planet)
|
from base import BaseBot
from scraping import research, general
class ResearcherBot(BaseBot):
def __init__(self, browser, config, planets):
self.research_client = research.Research(browser, config)
self.general_client = general.General(browser, config)
self.planets = planets
super(ResearcherBot, self).__init__(browser, config, planets)
def get_planet_for_research(self, planets=None):
if planets is None:
planets = self.planets
#for now the main planet will be used for research
return planets[0]
def get_next_research_item(self, planet):
available_research = self.research_client.get_available_research_for_planet(planet)
available_research_item = None
if available_research is not None:
available_research_item = available_research[0]
self.logger.info("Available Research:")
for item in available_research:
self.logger.info(" " + item.name)
# Favor ship upgrades
for item in available_research:
if item.id in [109, 110, 111]:
available_research_item = item
break
return available_research_item
def auto_research_next_item(self):
planet = self.get_planet_for_research(self.planets)
research = self.get_next_research_item(planet)
if research is not None:
self.research_client.research_item(research, planet)
else:
self.logger.info("Nothing to research on planet %s" % planet)
|
Add logging if no research available
|
Add logging if no research available
|
Python
|
mit
|
yosh778/OG-Bot,yosh778/OG-Bot,yosh778/OG-Bot,winiciuscota/OG-Bot
|
from base import BaseBot
from scraping import research, general
class ResearcherBot(BaseBot):
def __init__(self, browser, config, planets):
self.research_client = research.Research(browser, config)
self.general_client = general.General(browser, config)
self.planets = planets
super(ResearcherBot, self).__init__(browser, config, planets)
def get_planet_for_research(self, planets=None):
if planets is None:
planets = self.planets
#for now the main planet will be used for research
return planets[0]
def get_next_research_item(self, planet):
available_research = self.research_client.get_available_research_for_planet(planet)
available_research_item = None
if available_research is not None:
available_research_item = available_research[0]
self.logger.info("Available Research:")
for item in available_research:
self.logger.info(" " + item.name)
# Favor ship upgrades
for item in available_research:
if item.id in [109, 110, 111]:
available_research_item = item
break
return available_research_item
def auto_research_next_item(self):
planet = self.get_planet_for_research(self.planets)
research = self.get_next_research_item(planet)
if research is not None:
self.research_client.research_item(research, planet)
Add logging if no research available
|
from base import BaseBot
from scraping import research, general
class ResearcherBot(BaseBot):
def __init__(self, browser, config, planets):
self.research_client = research.Research(browser, config)
self.general_client = general.General(browser, config)
self.planets = planets
super(ResearcherBot, self).__init__(browser, config, planets)
def get_planet_for_research(self, planets=None):
if planets is None:
planets = self.planets
#for now the main planet will be used for research
return planets[0]
def get_next_research_item(self, planet):
available_research = self.research_client.get_available_research_for_planet(planet)
available_research_item = None
if available_research is not None:
available_research_item = available_research[0]
self.logger.info("Available Research:")
for item in available_research:
self.logger.info(" " + item.name)
# Favor ship upgrades
for item in available_research:
if item.id in [109, 110, 111]:
available_research_item = item
break
return available_research_item
def auto_research_next_item(self):
planet = self.get_planet_for_research(self.planets)
research = self.get_next_research_item(planet)
if research is not None:
self.research_client.research_item(research, planet)
else:
self.logger.info("Nothing to research on planet %s" % planet)
|
<commit_before>from base import BaseBot
from scraping import research, general
class ResearcherBot(BaseBot):
def __init__(self, browser, config, planets):
self.research_client = research.Research(browser, config)
self.general_client = general.General(browser, config)
self.planets = planets
super(ResearcherBot, self).__init__(browser, config, planets)
def get_planet_for_research(self, planets=None):
if planets is None:
planets = self.planets
#for now the main planet will be used for research
return planets[0]
def get_next_research_item(self, planet):
available_research = self.research_client.get_available_research_for_planet(planet)
available_research_item = None
if available_research is not None:
available_research_item = available_research[0]
self.logger.info("Available Research:")
for item in available_research:
self.logger.info(" " + item.name)
# Favor ship upgrades
for item in available_research:
if item.id in [109, 110, 111]:
available_research_item = item
break
return available_research_item
def auto_research_next_item(self):
planet = self.get_planet_for_research(self.planets)
research = self.get_next_research_item(planet)
if research is not None:
self.research_client.research_item(research, planet)
<commit_msg>Add logging if no research available<commit_after>
|
from base import BaseBot
from scraping import research, general
class ResearcherBot(BaseBot):
def __init__(self, browser, config, planets):
self.research_client = research.Research(browser, config)
self.general_client = general.General(browser, config)
self.planets = planets
super(ResearcherBot, self).__init__(browser, config, planets)
def get_planet_for_research(self, planets=None):
if planets is None:
planets = self.planets
#for now the main planet will be used for research
return planets[0]
def get_next_research_item(self, planet):
available_research = self.research_client.get_available_research_for_planet(planet)
available_research_item = None
if available_research is not None:
available_research_item = available_research[0]
self.logger.info("Available Research:")
for item in available_research:
self.logger.info(" " + item.name)
# Favor ship upgrades
for item in available_research:
if item.id in [109, 110, 111]:
available_research_item = item
break
return available_research_item
def auto_research_next_item(self):
planet = self.get_planet_for_research(self.planets)
research = self.get_next_research_item(planet)
if research is not None:
self.research_client.research_item(research, planet)
else:
self.logger.info("Nothing to research on planet %s" % planet)
|
from base import BaseBot
from scraping import research, general
class ResearcherBot(BaseBot):
def __init__(self, browser, config, planets):
self.research_client = research.Research(browser, config)
self.general_client = general.General(browser, config)
self.planets = planets
super(ResearcherBot, self).__init__(browser, config, planets)
def get_planet_for_research(self, planets=None):
if planets is None:
planets = self.planets
#for now the main planet will be used for research
return planets[0]
def get_next_research_item(self, planet):
available_research = self.research_client.get_available_research_for_planet(planet)
available_research_item = None
if available_research is not None:
available_research_item = available_research[0]
self.logger.info("Available Research:")
for item in available_research:
self.logger.info(" " + item.name)
# Favor ship upgrades
for item in available_research:
if item.id in [109, 110, 111]:
available_research_item = item
break
return available_research_item
def auto_research_next_item(self):
planet = self.get_planet_for_research(self.planets)
research = self.get_next_research_item(planet)
if research is not None:
self.research_client.research_item(research, planet)
Add logging if no research availablefrom base import BaseBot
from scraping import research, general
class ResearcherBot(BaseBot):
def __init__(self, browser, config, planets):
self.research_client = research.Research(browser, config)
self.general_client = general.General(browser, config)
self.planets = planets
super(ResearcherBot, self).__init__(browser, config, planets)
def get_planet_for_research(self, planets=None):
if planets is None:
planets = self.planets
#for now the main planet will be used for research
return planets[0]
def get_next_research_item(self, planet):
available_research = self.research_client.get_available_research_for_planet(planet)
available_research_item = None
if available_research is not None:
available_research_item = available_research[0]
self.logger.info("Available Research:")
for item in available_research:
self.logger.info(" " + item.name)
# Favor ship upgrades
for item in available_research:
if item.id in [109, 110, 111]:
available_research_item = item
break
return available_research_item
def auto_research_next_item(self):
planet = self.get_planet_for_research(self.planets)
research = self.get_next_research_item(planet)
if research is not None:
self.research_client.research_item(research, planet)
else:
self.logger.info("Nothing to research on planet %s" % planet)
|
<commit_before>from base import BaseBot
from scraping import research, general
class ResearcherBot(BaseBot):
def __init__(self, browser, config, planets):
self.research_client = research.Research(browser, config)
self.general_client = general.General(browser, config)
self.planets = planets
super(ResearcherBot, self).__init__(browser, config, planets)
def get_planet_for_research(self, planets=None):
if planets is None:
planets = self.planets
#for now the main planet will be used for research
return planets[0]
def get_next_research_item(self, planet):
available_research = self.research_client.get_available_research_for_planet(planet)
available_research_item = None
if available_research is not None:
available_research_item = available_research[0]
self.logger.info("Available Research:")
for item in available_research:
self.logger.info(" " + item.name)
# Favor ship upgrades
for item in available_research:
if item.id in [109, 110, 111]:
available_research_item = item
break
return available_research_item
def auto_research_next_item(self):
planet = self.get_planet_for_research(self.planets)
research = self.get_next_research_item(planet)
if research is not None:
self.research_client.research_item(research, planet)
<commit_msg>Add logging if no research available<commit_after>from base import BaseBot
from scraping import research, general
class ResearcherBot(BaseBot):
def __init__(self, browser, config, planets):
self.research_client = research.Research(browser, config)
self.general_client = general.General(browser, config)
self.planets = planets
super(ResearcherBot, self).__init__(browser, config, planets)
def get_planet_for_research(self, planets=None):
if planets is None:
planets = self.planets
#for now the main planet will be used for research
return planets[0]
def get_next_research_item(self, planet):
available_research = self.research_client.get_available_research_for_planet(planet)
available_research_item = None
if available_research is not None:
available_research_item = available_research[0]
self.logger.info("Available Research:")
for item in available_research:
self.logger.info(" " + item.name)
# Favor ship upgrades
for item in available_research:
if item.id in [109, 110, 111]:
available_research_item = item
break
return available_research_item
def auto_research_next_item(self):
planet = self.get_planet_for_research(self.planets)
research = self.get_next_research_item(planet)
if research is not None:
self.research_client.research_item(research, planet)
else:
self.logger.info("Nothing to research on planet %s" % planet)
|
b8792d9164f669133032eb26ab78281acb17c9c5
|
appengine/standard/conftest.py
|
appengine/standard/conftest.py
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import six
# Import py.test hooks and fixtures for App Engine
from gcp.testing.appengine import (
login,
pytest_configure,
pytest_runtest_call,
run_tasks,
testbed)
(login)
(pytest_configure)
(pytest_runtest_call)
(run_tasks)
(testbed)
def pytest_ignore_collect(path, config):
"""Skip App Engine tests in python 3 and if no SDK is available."""
if 'appengine/standard' in str(path):
if six.PY3:
return True
if 'GAE_SDK_PATH' not in os.environ:
return True
return False
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
# Import py.test hooks and fixtures for App Engine
from gcp.testing.appengine import (
login,
pytest_configure,
pytest_runtest_call,
run_tasks,
testbed)
import six
(login)
(pytest_configure)
(pytest_runtest_call)
(run_tasks)
(testbed)
def pytest_ignore_collect(path, config):
"""Skip App Engine tests in python 3 or if no SDK is available."""
if 'appengine/standard' in str(path):
if six.PY3:
return True
if 'GAE_SDK_PATH' not in os.environ:
return True
return False
|
Fix lint issue and review comments
|
Fix lint issue and review comments
Change-Id: I02a53961b6411247ef06d84dad7b533cb97d89f7
|
Python
|
apache-2.0
|
canglade/NLP,hashems/Mobile-Cloud-Development-Projects,sharbison3/python-docs-samples,JavaRabbit/CS496_capstone,GoogleCloudPlatform/python-docs-samples,sharbison3/python-docs-samples,sharbison3/python-docs-samples,sharbison3/python-docs-samples,hashems/Mobile-Cloud-Development-Projects,JavaRabbit/CS496_capstone,BrandonY/python-docs-samples,BrandonY/python-docs-samples,BrandonY/python-docs-samples,canglade/NLP,GoogleCloudPlatform/python-docs-samples,canglade/NLP,hashems/Mobile-Cloud-Development-Projects,BrandonY/python-docs-samples,GoogleCloudPlatform/python-docs-samples,canglade/NLP,hashems/Mobile-Cloud-Development-Projects,JavaRabbit/CS496_capstone,JavaRabbit/CS496_capstone,GoogleCloudPlatform/python-docs-samples
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import six
# Import py.test hooks and fixtures for App Engine
from gcp.testing.appengine import (
login,
pytest_configure,
pytest_runtest_call,
run_tasks,
testbed)
(login)
(pytest_configure)
(pytest_runtest_call)
(run_tasks)
(testbed)
def pytest_ignore_collect(path, config):
"""Skip App Engine tests in python 3 and if no SDK is available."""
if 'appengine/standard' in str(path):
if six.PY3:
return True
if 'GAE_SDK_PATH' not in os.environ:
return True
return False
Fix lint issue and review comments
Change-Id: I02a53961b6411247ef06d84dad7b533cb97d89f7
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
# Import py.test hooks and fixtures for App Engine
from gcp.testing.appengine import (
login,
pytest_configure,
pytest_runtest_call,
run_tasks,
testbed)
import six
(login)
(pytest_configure)
(pytest_runtest_call)
(run_tasks)
(testbed)
def pytest_ignore_collect(path, config):
"""Skip App Engine tests in python 3 or if no SDK is available."""
if 'appengine/standard' in str(path):
if six.PY3:
return True
if 'GAE_SDK_PATH' not in os.environ:
return True
return False
|
<commit_before># Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import six
# Import py.test hooks and fixtures for App Engine
from gcp.testing.appengine import (
login,
pytest_configure,
pytest_runtest_call,
run_tasks,
testbed)
(login)
(pytest_configure)
(pytest_runtest_call)
(run_tasks)
(testbed)
def pytest_ignore_collect(path, config):
"""Skip App Engine tests in python 3 and if no SDK is available."""
if 'appengine/standard' in str(path):
if six.PY3:
return True
if 'GAE_SDK_PATH' not in os.environ:
return True
return False
<commit_msg>Fix lint issue and review comments
Change-Id: I02a53961b6411247ef06d84dad7b533cb97d89f7<commit_after>
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
# Import py.test hooks and fixtures for App Engine
from gcp.testing.appengine import (
login,
pytest_configure,
pytest_runtest_call,
run_tasks,
testbed)
import six
(login)
(pytest_configure)
(pytest_runtest_call)
(run_tasks)
(testbed)
def pytest_ignore_collect(path, config):
"""Skip App Engine tests in python 3 or if no SDK is available."""
if 'appengine/standard' in str(path):
if six.PY3:
return True
if 'GAE_SDK_PATH' not in os.environ:
return True
return False
|
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import six
# Import py.test hooks and fixtures for App Engine
from gcp.testing.appengine import (
login,
pytest_configure,
pytest_runtest_call,
run_tasks,
testbed)
(login)
(pytest_configure)
(pytest_runtest_call)
(run_tasks)
(testbed)
def pytest_ignore_collect(path, config):
"""Skip App Engine tests in python 3 and if no SDK is available."""
if 'appengine/standard' in str(path):
if six.PY3:
return True
if 'GAE_SDK_PATH' not in os.environ:
return True
return False
Fix lint issue and review comments
Change-Id: I02a53961b6411247ef06d84dad7b533cb97d89f7# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
# Import py.test hooks and fixtures for App Engine
from gcp.testing.appengine import (
login,
pytest_configure,
pytest_runtest_call,
run_tasks,
testbed)
import six
(login)
(pytest_configure)
(pytest_runtest_call)
(run_tasks)
(testbed)
def pytest_ignore_collect(path, config):
"""Skip App Engine tests in python 3 or if no SDK is available."""
if 'appengine/standard' in str(path):
if six.PY3:
return True
if 'GAE_SDK_PATH' not in os.environ:
return True
return False
|
<commit_before># Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import six
# Import py.test hooks and fixtures for App Engine
from gcp.testing.appengine import (
login,
pytest_configure,
pytest_runtest_call,
run_tasks,
testbed)
(login)
(pytest_configure)
(pytest_runtest_call)
(run_tasks)
(testbed)
def pytest_ignore_collect(path, config):
"""Skip App Engine tests in python 3 and if no SDK is available."""
if 'appengine/standard' in str(path):
if six.PY3:
return True
if 'GAE_SDK_PATH' not in os.environ:
return True
return False
<commit_msg>Fix lint issue and review comments
Change-Id: I02a53961b6411247ef06d84dad7b533cb97d89f7<commit_after># Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
# Import py.test hooks and fixtures for App Engine
from gcp.testing.appengine import (
login,
pytest_configure,
pytest_runtest_call,
run_tasks,
testbed)
import six
(login)
(pytest_configure)
(pytest_runtest_call)
(run_tasks)
(testbed)
def pytest_ignore_collect(path, config):
"""Skip App Engine tests in python 3 or if no SDK is available."""
if 'appengine/standard' in str(path):
if six.PY3:
return True
if 'GAE_SDK_PATH' not in os.environ:
return True
return False
|
e921df4218053b1afe2a60262516873e96ac2679
|
slave/skia_slave_scripts/flavor_utils/xsan_build_step_utils.py
|
slave/skia_slave_scripts/flavor_utils/xsan_build_step_utils.py
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Utilities for ASAN,TSAN,etc. build steps. """
from default_build_step_utils import DefaultBuildStepUtils
from utils import shell_utils
import os
LLVM_PATH = '/home/chrome-bot/llvm-3.4/Release+Asserts/bin/'
class XsanBuildStepUtils(DefaultBuildStepUtils):
def Compile(self, target):
# Run the xsan_build script.
os.environ['PATH'] = LLVM_PATH + ':' + os.environ['PATH']
shell_utils.run(['which', 'clang'])
shell_utils.run(['clang', '--version'])
os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
cmd = [
os.path.join('tools', 'xsan_build'),
self._step.args['sanitizer'],
target,
'BUILDTYPE=%s' % self._step.configuration,
]
cmd.extend(self._step.default_make_flags)
cmd.extend(self._step.make_flags)
shell_utils.run(cmd)
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Utilities for ASAN,TSAN,etc. build steps. """
from default_build_step_utils import DefaultBuildStepUtils
from utils import shell_utils
import os
LLVM_PATH = '/home/chrome-bot/llvm-3.4/Release+Asserts/bin/'
class XsanBuildStepUtils(DefaultBuildStepUtils):
def Compile(self, target):
# Run the xsan_build script.
os.environ['PATH'] = LLVM_PATH + ':' + os.environ['PATH']
shell_utils.run(['which', 'clang'])
shell_utils.run(['clang', '--version'])
os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
cmd = [
os.path.join('tools', 'xsan_build'),
self._step.args['sanitizer'],
target,
'BUILDTYPE=%s' % self._step.configuration,
]
cmd.extend(self._step.default_make_flags)
cmd.extend(self._step.make_flags)
shell_utils.run(cmd)
def RunFlavoredCmd(self, app, args):
os.environ['TSAN_OPTIONS'] = 'suppressions=tools/tsan.supp'
return shell_utils.run([self._PathToBinary(app)] + args)
|
Use tools/tsan.supp for TSAN suppressions.
|
Use tools/tsan.supp for TSAN suppressions.
BUG=skia:
R=borenet@google.com, mtklein@google.com
Author: mtklein@chromium.org
Review URL: https://codereview.chromium.org/266393003
|
Python
|
bsd-3-clause
|
Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Utilities for ASAN,TSAN,etc. build steps. """
from default_build_step_utils import DefaultBuildStepUtils
from utils import shell_utils
import os
LLVM_PATH = '/home/chrome-bot/llvm-3.4/Release+Asserts/bin/'
class XsanBuildStepUtils(DefaultBuildStepUtils):
def Compile(self, target):
# Run the xsan_build script.
os.environ['PATH'] = LLVM_PATH + ':' + os.environ['PATH']
shell_utils.run(['which', 'clang'])
shell_utils.run(['clang', '--version'])
os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
cmd = [
os.path.join('tools', 'xsan_build'),
self._step.args['sanitizer'],
target,
'BUILDTYPE=%s' % self._step.configuration,
]
cmd.extend(self._step.default_make_flags)
cmd.extend(self._step.make_flags)
shell_utils.run(cmd)
Use tools/tsan.supp for TSAN suppressions.
BUG=skia:
R=borenet@google.com, mtklein@google.com
Author: mtklein@chromium.org
Review URL: https://codereview.chromium.org/266393003
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Utilities for ASAN,TSAN,etc. build steps. """
from default_build_step_utils import DefaultBuildStepUtils
from utils import shell_utils
import os
LLVM_PATH = '/home/chrome-bot/llvm-3.4/Release+Asserts/bin/'
class XsanBuildStepUtils(DefaultBuildStepUtils):
def Compile(self, target):
# Run the xsan_build script.
os.environ['PATH'] = LLVM_PATH + ':' + os.environ['PATH']
shell_utils.run(['which', 'clang'])
shell_utils.run(['clang', '--version'])
os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
cmd = [
os.path.join('tools', 'xsan_build'),
self._step.args['sanitizer'],
target,
'BUILDTYPE=%s' % self._step.configuration,
]
cmd.extend(self._step.default_make_flags)
cmd.extend(self._step.make_flags)
shell_utils.run(cmd)
def RunFlavoredCmd(self, app, args):
os.environ['TSAN_OPTIONS'] = 'suppressions=tools/tsan.supp'
return shell_utils.run([self._PathToBinary(app)] + args)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Utilities for ASAN,TSAN,etc. build steps. """
from default_build_step_utils import DefaultBuildStepUtils
from utils import shell_utils
import os
LLVM_PATH = '/home/chrome-bot/llvm-3.4/Release+Asserts/bin/'
class XsanBuildStepUtils(DefaultBuildStepUtils):
def Compile(self, target):
# Run the xsan_build script.
os.environ['PATH'] = LLVM_PATH + ':' + os.environ['PATH']
shell_utils.run(['which', 'clang'])
shell_utils.run(['clang', '--version'])
os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
cmd = [
os.path.join('tools', 'xsan_build'),
self._step.args['sanitizer'],
target,
'BUILDTYPE=%s' % self._step.configuration,
]
cmd.extend(self._step.default_make_flags)
cmd.extend(self._step.make_flags)
shell_utils.run(cmd)
<commit_msg>Use tools/tsan.supp for TSAN suppressions.
BUG=skia:
R=borenet@google.com, mtklein@google.com
Author: mtklein@chromium.org
Review URL: https://codereview.chromium.org/266393003<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Utilities for ASAN,TSAN,etc. build steps. """
from default_build_step_utils import DefaultBuildStepUtils
from utils import shell_utils
import os
LLVM_PATH = '/home/chrome-bot/llvm-3.4/Release+Asserts/bin/'
class XsanBuildStepUtils(DefaultBuildStepUtils):
def Compile(self, target):
# Run the xsan_build script.
os.environ['PATH'] = LLVM_PATH + ':' + os.environ['PATH']
shell_utils.run(['which', 'clang'])
shell_utils.run(['clang', '--version'])
os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
cmd = [
os.path.join('tools', 'xsan_build'),
self._step.args['sanitizer'],
target,
'BUILDTYPE=%s' % self._step.configuration,
]
cmd.extend(self._step.default_make_flags)
cmd.extend(self._step.make_flags)
shell_utils.run(cmd)
def RunFlavoredCmd(self, app, args):
os.environ['TSAN_OPTIONS'] = 'suppressions=tools/tsan.supp'
return shell_utils.run([self._PathToBinary(app)] + args)
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Utilities for ASAN,TSAN,etc. build steps. """
from default_build_step_utils import DefaultBuildStepUtils
from utils import shell_utils
import os
LLVM_PATH = '/home/chrome-bot/llvm-3.4/Release+Asserts/bin/'
class XsanBuildStepUtils(DefaultBuildStepUtils):
def Compile(self, target):
# Run the xsan_build script.
os.environ['PATH'] = LLVM_PATH + ':' + os.environ['PATH']
shell_utils.run(['which', 'clang'])
shell_utils.run(['clang', '--version'])
os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
cmd = [
os.path.join('tools', 'xsan_build'),
self._step.args['sanitizer'],
target,
'BUILDTYPE=%s' % self._step.configuration,
]
cmd.extend(self._step.default_make_flags)
cmd.extend(self._step.make_flags)
shell_utils.run(cmd)
Use tools/tsan.supp for TSAN suppressions.
BUG=skia:
R=borenet@google.com, mtklein@google.com
Author: mtklein@chromium.org
Review URL: https://codereview.chromium.org/266393003#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Utilities for ASAN,TSAN,etc. build steps. """
from default_build_step_utils import DefaultBuildStepUtils
from utils import shell_utils
import os
LLVM_PATH = '/home/chrome-bot/llvm-3.4/Release+Asserts/bin/'
class XsanBuildStepUtils(DefaultBuildStepUtils):
def Compile(self, target):
# Run the xsan_build script.
os.environ['PATH'] = LLVM_PATH + ':' + os.environ['PATH']
shell_utils.run(['which', 'clang'])
shell_utils.run(['clang', '--version'])
os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
cmd = [
os.path.join('tools', 'xsan_build'),
self._step.args['sanitizer'],
target,
'BUILDTYPE=%s' % self._step.configuration,
]
cmd.extend(self._step.default_make_flags)
cmd.extend(self._step.make_flags)
shell_utils.run(cmd)
def RunFlavoredCmd(self, app, args):
os.environ['TSAN_OPTIONS'] = 'suppressions=tools/tsan.supp'
return shell_utils.run([self._PathToBinary(app)] + args)
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Utilities for ASAN,TSAN,etc. build steps. """
from default_build_step_utils import DefaultBuildStepUtils
from utils import shell_utils
import os
LLVM_PATH = '/home/chrome-bot/llvm-3.4/Release+Asserts/bin/'
class XsanBuildStepUtils(DefaultBuildStepUtils):
def Compile(self, target):
# Run the xsan_build script.
os.environ['PATH'] = LLVM_PATH + ':' + os.environ['PATH']
shell_utils.run(['which', 'clang'])
shell_utils.run(['clang', '--version'])
os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
cmd = [
os.path.join('tools', 'xsan_build'),
self._step.args['sanitizer'],
target,
'BUILDTYPE=%s' % self._step.configuration,
]
cmd.extend(self._step.default_make_flags)
cmd.extend(self._step.make_flags)
shell_utils.run(cmd)
<commit_msg>Use tools/tsan.supp for TSAN suppressions.
BUG=skia:
R=borenet@google.com, mtklein@google.com
Author: mtklein@chromium.org
Review URL: https://codereview.chromium.org/266393003<commit_after>#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Utilities for ASAN,TSAN,etc. build steps. """
from default_build_step_utils import DefaultBuildStepUtils
from utils import shell_utils
import os
LLVM_PATH = '/home/chrome-bot/llvm-3.4/Release+Asserts/bin/'
class XsanBuildStepUtils(DefaultBuildStepUtils):
def Compile(self, target):
# Run the xsan_build script.
os.environ['PATH'] = LLVM_PATH + ':' + os.environ['PATH']
shell_utils.run(['which', 'clang'])
shell_utils.run(['clang', '--version'])
os.environ['GYP_DEFINES'] = self._step.args['gyp_defines']
print 'GYP_DEFINES="%s"' % os.environ['GYP_DEFINES']
cmd = [
os.path.join('tools', 'xsan_build'),
self._step.args['sanitizer'],
target,
'BUILDTYPE=%s' % self._step.configuration,
]
cmd.extend(self._step.default_make_flags)
cmd.extend(self._step.make_flags)
shell_utils.run(cmd)
def RunFlavoredCmd(self, app, args):
os.environ['TSAN_OPTIONS'] = 'suppressions=tools/tsan.supp'
return shell_utils.run([self._PathToBinary(app)] + args)
|
a3f9b4d7a82335cadaba09167a6ac873733646fa
|
lambda_function.py
|
lambda_function.py
|
#!/usr/bin/env python2
from StringIO import StringIO
import boto3
from dmr_marc_users_cs750 import (
get_users, get_groups,
write_contacts_csv,
write_contacts_xlsx
)
def lambda_handler(event=None, context=None):
users = get_users()
csvo = StringIO()
write_contacts_csv(users, csvo)
s3 = boto3.client('s3')
s3.put_object(
Bucket='dmr-contacts', Key='DMR_contacts.csv',
Body=csvo.getvalue(), ContentType='text/csv', ACL='public-read')
csvo.close()
groups = get_groups()
xlsxo = StringIO()
write_contacts_xlsx(groups + users, xlsxo)
s3.put_object(
Bucket='dmr-contacts', Key='contacts-dci.xlsx',
Body=xlsxo.getvalue(),
ContentType=('application/'
'vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
ACL='public-read')
xlsxo.close()
|
#!/usr/bin/env python2
from StringIO import StringIO
import boto3
from dmr_marc_users_cs750 import (
get_users, get_groups,
write_contacts_csv,
write_contacts_xlsx
)
def lambda_handler(event=None, context=None):
users = get_users()
csvo = StringIO()
write_contacts_csv(users, csvo)
s3 = boto3.client('s3')
s3.put_object(
Bucket='dmr-contacts', Key='DMR_contacts.csv',
Body=csvo.getvalue(), ContentType='text/csv', ACL='public-read')
csvo.close()
groups = get_groups()
xlsxo = StringIO()
write_contacts_xlsx(groups + users, xlsxo)
s3.put_object(
Bucket='dmr-contacts', Key='contacts-dci.xlsx',
Body=xlsxo.getvalue(),
ContentType=('application/'
'vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
ACL='public-read')
xlsxo.close()
if __name__ == '__main__':
lambda_handler()
|
Add main routine to lambda handler
|
Add main routine to lambda handler
|
Python
|
apache-2.0
|
ajorg/DMR_contacts
|
#!/usr/bin/env python2
from StringIO import StringIO
import boto3
from dmr_marc_users_cs750 import (
get_users, get_groups,
write_contacts_csv,
write_contacts_xlsx
)
def lambda_handler(event=None, context=None):
users = get_users()
csvo = StringIO()
write_contacts_csv(users, csvo)
s3 = boto3.client('s3')
s3.put_object(
Bucket='dmr-contacts', Key='DMR_contacts.csv',
Body=csvo.getvalue(), ContentType='text/csv', ACL='public-read')
csvo.close()
groups = get_groups()
xlsxo = StringIO()
write_contacts_xlsx(groups + users, xlsxo)
s3.put_object(
Bucket='dmr-contacts', Key='contacts-dci.xlsx',
Body=xlsxo.getvalue(),
ContentType=('application/'
'vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
ACL='public-read')
xlsxo.close()
Add main routine to lambda handler
|
#!/usr/bin/env python2
from StringIO import StringIO
import boto3
from dmr_marc_users_cs750 import (
get_users, get_groups,
write_contacts_csv,
write_contacts_xlsx
)
def lambda_handler(event=None, context=None):
users = get_users()
csvo = StringIO()
write_contacts_csv(users, csvo)
s3 = boto3.client('s3')
s3.put_object(
Bucket='dmr-contacts', Key='DMR_contacts.csv',
Body=csvo.getvalue(), ContentType='text/csv', ACL='public-read')
csvo.close()
groups = get_groups()
xlsxo = StringIO()
write_contacts_xlsx(groups + users, xlsxo)
s3.put_object(
Bucket='dmr-contacts', Key='contacts-dci.xlsx',
Body=xlsxo.getvalue(),
ContentType=('application/'
'vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
ACL='public-read')
xlsxo.close()
if __name__ == '__main__':
lambda_handler()
|
<commit_before>#!/usr/bin/env python2
from StringIO import StringIO
import boto3
from dmr_marc_users_cs750 import (
get_users, get_groups,
write_contacts_csv,
write_contacts_xlsx
)
def lambda_handler(event=None, context=None):
users = get_users()
csvo = StringIO()
write_contacts_csv(users, csvo)
s3 = boto3.client('s3')
s3.put_object(
Bucket='dmr-contacts', Key='DMR_contacts.csv',
Body=csvo.getvalue(), ContentType='text/csv', ACL='public-read')
csvo.close()
groups = get_groups()
xlsxo = StringIO()
write_contacts_xlsx(groups + users, xlsxo)
s3.put_object(
Bucket='dmr-contacts', Key='contacts-dci.xlsx',
Body=xlsxo.getvalue(),
ContentType=('application/'
'vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
ACL='public-read')
xlsxo.close()
<commit_msg>Add main routine to lambda handler<commit_after>
|
#!/usr/bin/env python2
from StringIO import StringIO
import boto3
from dmr_marc_users_cs750 import (
get_users, get_groups,
write_contacts_csv,
write_contacts_xlsx
)
def lambda_handler(event=None, context=None):
users = get_users()
csvo = StringIO()
write_contacts_csv(users, csvo)
s3 = boto3.client('s3')
s3.put_object(
Bucket='dmr-contacts', Key='DMR_contacts.csv',
Body=csvo.getvalue(), ContentType='text/csv', ACL='public-read')
csvo.close()
groups = get_groups()
xlsxo = StringIO()
write_contacts_xlsx(groups + users, xlsxo)
s3.put_object(
Bucket='dmr-contacts', Key='contacts-dci.xlsx',
Body=xlsxo.getvalue(),
ContentType=('application/'
'vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
ACL='public-read')
xlsxo.close()
if __name__ == '__main__':
lambda_handler()
|
#!/usr/bin/env python2
from StringIO import StringIO
import boto3
from dmr_marc_users_cs750 import (
get_users, get_groups,
write_contacts_csv,
write_contacts_xlsx
)
def lambda_handler(event=None, context=None):
users = get_users()
csvo = StringIO()
write_contacts_csv(users, csvo)
s3 = boto3.client('s3')
s3.put_object(
Bucket='dmr-contacts', Key='DMR_contacts.csv',
Body=csvo.getvalue(), ContentType='text/csv', ACL='public-read')
csvo.close()
groups = get_groups()
xlsxo = StringIO()
write_contacts_xlsx(groups + users, xlsxo)
s3.put_object(
Bucket='dmr-contacts', Key='contacts-dci.xlsx',
Body=xlsxo.getvalue(),
ContentType=('application/'
'vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
ACL='public-read')
xlsxo.close()
Add main routine to lambda handler#!/usr/bin/env python2
from StringIO import StringIO
import boto3
from dmr_marc_users_cs750 import (
get_users, get_groups,
write_contacts_csv,
write_contacts_xlsx
)
def lambda_handler(event=None, context=None):
users = get_users()
csvo = StringIO()
write_contacts_csv(users, csvo)
s3 = boto3.client('s3')
s3.put_object(
Bucket='dmr-contacts', Key='DMR_contacts.csv',
Body=csvo.getvalue(), ContentType='text/csv', ACL='public-read')
csvo.close()
groups = get_groups()
xlsxo = StringIO()
write_contacts_xlsx(groups + users, xlsxo)
s3.put_object(
Bucket='dmr-contacts', Key='contacts-dci.xlsx',
Body=xlsxo.getvalue(),
ContentType=('application/'
'vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
ACL='public-read')
xlsxo.close()
if __name__ == '__main__':
lambda_handler()
|
<commit_before>#!/usr/bin/env python2
from StringIO import StringIO
import boto3
from dmr_marc_users_cs750 import (
get_users, get_groups,
write_contacts_csv,
write_contacts_xlsx
)
def lambda_handler(event=None, context=None):
users = get_users()
csvo = StringIO()
write_contacts_csv(users, csvo)
s3 = boto3.client('s3')
s3.put_object(
Bucket='dmr-contacts', Key='DMR_contacts.csv',
Body=csvo.getvalue(), ContentType='text/csv', ACL='public-read')
csvo.close()
groups = get_groups()
xlsxo = StringIO()
write_contacts_xlsx(groups + users, xlsxo)
s3.put_object(
Bucket='dmr-contacts', Key='contacts-dci.xlsx',
Body=xlsxo.getvalue(),
ContentType=('application/'
'vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
ACL='public-read')
xlsxo.close()
<commit_msg>Add main routine to lambda handler<commit_after>#!/usr/bin/env python2
from StringIO import StringIO
import boto3
from dmr_marc_users_cs750 import (
get_users, get_groups,
write_contacts_csv,
write_contacts_xlsx
)
def lambda_handler(event=None, context=None):
users = get_users()
csvo = StringIO()
write_contacts_csv(users, csvo)
s3 = boto3.client('s3')
s3.put_object(
Bucket='dmr-contacts', Key='DMR_contacts.csv',
Body=csvo.getvalue(), ContentType='text/csv', ACL='public-read')
csvo.close()
groups = get_groups()
xlsxo = StringIO()
write_contacts_xlsx(groups + users, xlsxo)
s3.put_object(
Bucket='dmr-contacts', Key='contacts-dci.xlsx',
Body=xlsxo.getvalue(),
ContentType=('application/'
'vnd.openxmlformats-officedocument.spreadsheetml.sheet'),
ACL='public-read')
xlsxo.close()
if __name__ == '__main__':
lambda_handler()
|
fd18cd0867f925b3b7abf214cbfb3c2f8f9101cb
|
uri_extension_to_dict.py
|
uri_extension_to_dict.py
|
#! /usr/bin/env python2.7
from collections import Counter
from pprint import pprint
from re import search
from sys import argv
def count_extension(filename):
with open(filename, 'r') as file:
# New empty counter.
ext_dict = Counter()
for line in file:
# Remove newlines / carriage returns.
line = line.strip()
# Should be a non-empty line, with 200 OK and GET method.
if line and "GET" in line and line.split('|')[13] == '200':
ext_line = line.split('|')[3]
if '.' in ext_line:
# extensions should be like this regex.
clean_ext = search('[a-zA-Z0-9]+', \
ext_line.split('.')[-1])
# If regex returning None or a digit, do not add it.
if clean_ext is not None and \
clean_ext.group(0).isdigit() is not True:
# lower the extension.
ext_dict[clean_ext.group(0).lower()] += 1
pprint(sorted(((v,k) for k,v in ext_dict.iteritems()), reverse=True))
if __name__ == '__main__':
count_extension(argv[1])
|
#! /usr/bin/env python2.7
from collections import Counter
from pprint import pprint
from re import search
from sys import argv
def count_extension(filename):
with open(filename, 'r') as file:
# New empty counter.
ext_dict = Counter()
for line in file:
# Remove newlines / carriage returns.
line = line.strip()
# Should be a non-empty line, with 200 OK and GET method.
if line and 'GET' in line and line.split('|')[13] == '200':
ext_line = line.split('|')[3]
if '.' in ext_line:
# extensions should be like this regex.
clean_ext = search('[a-zA-Z0-9]+', \
ext_line.split('.')[-1])
# If regex returning None or a digit, do not add it.
if clean_ext is not None and \
clean_ext.group(0).isdigit() is not True:
# lower the extension.
ext_dict[clean_ext.group(0).lower()] += 1
pprint(sorted(((v,k) for k,v in ext_dict.iteritems()), reverse=True))
if __name__ == '__main__':
count_extension(argv[1])
|
Replace double quote by simple quote, no need interpolation
|
Replace double quote by simple quote, no need interpolation
|
Python
|
mit
|
gdelpierre/scripts,gdelpierre/scripts
|
#! /usr/bin/env python2.7
from collections import Counter
from pprint import pprint
from re import search
from sys import argv
def count_extension(filename):
with open(filename, 'r') as file:
# New empty counter.
ext_dict = Counter()
for line in file:
# Remove newlines / carriage returns.
line = line.strip()
# Should be a non-empty line, with 200 OK and GET method.
if line and "GET" in line and line.split('|')[13] == '200':
ext_line = line.split('|')[3]
if '.' in ext_line:
# extensions should be like this regex.
clean_ext = search('[a-zA-Z0-9]+', \
ext_line.split('.')[-1])
# If regex returning None or a digit, do not add it.
if clean_ext is not None and \
clean_ext.group(0).isdigit() is not True:
# lower the extension.
ext_dict[clean_ext.group(0).lower()] += 1
pprint(sorted(((v,k) for k,v in ext_dict.iteritems()), reverse=True))
if __name__ == '__main__':
count_extension(argv[1])
Replace double quote by simple quote, no need interpolation
|
#! /usr/bin/env python2.7
from collections import Counter
from pprint import pprint
from re import search
from sys import argv
def count_extension(filename):
with open(filename, 'r') as file:
# New empty counter.
ext_dict = Counter()
for line in file:
# Remove newlines / carriage returns.
line = line.strip()
# Should be a non-empty line, with 200 OK and GET method.
if line and 'GET' in line and line.split('|')[13] == '200':
ext_line = line.split('|')[3]
if '.' in ext_line:
# extensions should be like this regex.
clean_ext = search('[a-zA-Z0-9]+', \
ext_line.split('.')[-1])
# If regex returning None or a digit, do not add it.
if clean_ext is not None and \
clean_ext.group(0).isdigit() is not True:
# lower the extension.
ext_dict[clean_ext.group(0).lower()] += 1
pprint(sorted(((v,k) for k,v in ext_dict.iteritems()), reverse=True))
if __name__ == '__main__':
count_extension(argv[1])
|
<commit_before>#! /usr/bin/env python2.7
from collections import Counter
from pprint import pprint
from re import search
from sys import argv
def count_extension(filename):
with open(filename, 'r') as file:
# New empty counter.
ext_dict = Counter()
for line in file:
# Remove newlines / carriage returns.
line = line.strip()
# Should be a non-empty line, with 200 OK and GET method.
if line and "GET" in line and line.split('|')[13] == '200':
ext_line = line.split('|')[3]
if '.' in ext_line:
# extensions should be like this regex.
clean_ext = search('[a-zA-Z0-9]+', \
ext_line.split('.')[-1])
# If regex returning None or a digit, do not add it.
if clean_ext is not None and \
clean_ext.group(0).isdigit() is not True:
# lower the extension.
ext_dict[clean_ext.group(0).lower()] += 1
pprint(sorted(((v,k) for k,v in ext_dict.iteritems()), reverse=True))
if __name__ == '__main__':
count_extension(argv[1])
<commit_msg>Replace double quote by simple quote, no need interpolation<commit_after>
|
#! /usr/bin/env python2.7
from collections import Counter
from pprint import pprint
from re import search
from sys import argv
def count_extension(filename):
with open(filename, 'r') as file:
# New empty counter.
ext_dict = Counter()
for line in file:
# Remove newlines / carriage returns.
line = line.strip()
# Should be a non-empty line, with 200 OK and GET method.
if line and 'GET' in line and line.split('|')[13] == '200':
ext_line = line.split('|')[3]
if '.' in ext_line:
# extensions should be like this regex.
clean_ext = search('[a-zA-Z0-9]+', \
ext_line.split('.')[-1])
# If regex returning None or a digit, do not add it.
if clean_ext is not None and \
clean_ext.group(0).isdigit() is not True:
# lower the extension.
ext_dict[clean_ext.group(0).lower()] += 1
pprint(sorted(((v,k) for k,v in ext_dict.iteritems()), reverse=True))
if __name__ == '__main__':
count_extension(argv[1])
|
#! /usr/bin/env python2.7
from collections import Counter
from pprint import pprint
from re import search
from sys import argv
def count_extension(filename):
with open(filename, 'r') as file:
# New empty counter.
ext_dict = Counter()
for line in file:
# Remove newlines / carriage returns.
line = line.strip()
# Should be a non-empty line, with 200 OK and GET method.
if line and "GET" in line and line.split('|')[13] == '200':
ext_line = line.split('|')[3]
if '.' in ext_line:
# extensions should be like this regex.
clean_ext = search('[a-zA-Z0-9]+', \
ext_line.split('.')[-1])
# If regex returning None or a digit, do not add it.
if clean_ext is not None and \
clean_ext.group(0).isdigit() is not True:
# lower the extension.
ext_dict[clean_ext.group(0).lower()] += 1
pprint(sorted(((v,k) for k,v in ext_dict.iteritems()), reverse=True))
if __name__ == '__main__':
count_extension(argv[1])
Replace double quote by simple quote, no need interpolation#! /usr/bin/env python2.7
from collections import Counter
from pprint import pprint
from re import search
from sys import argv
def count_extension(filename):
with open(filename, 'r') as file:
# New empty counter.
ext_dict = Counter()
for line in file:
# Remove newlines / carriage returns.
line = line.strip()
# Should be a non-empty line, with 200 OK and GET method.
if line and 'GET' in line and line.split('|')[13] == '200':
ext_line = line.split('|')[3]
if '.' in ext_line:
# extensions should be like this regex.
clean_ext = search('[a-zA-Z0-9]+', \
ext_line.split('.')[-1])
# If regex returning None or a digit, do not add it.
if clean_ext is not None and \
clean_ext.group(0).isdigit() is not True:
# lower the extension.
ext_dict[clean_ext.group(0).lower()] += 1
pprint(sorted(((v,k) for k,v in ext_dict.iteritems()), reverse=True))
if __name__ == '__main__':
count_extension(argv[1])
|
<commit_before>#! /usr/bin/env python2.7
from collections import Counter
from pprint import pprint
from re import search
from sys import argv
def count_extension(filename):
with open(filename, 'r') as file:
# New empty counter.
ext_dict = Counter()
for line in file:
# Remove newlines / carriage returns.
line = line.strip()
# Should be a non-empty line, with 200 OK and GET method.
if line and "GET" in line and line.split('|')[13] == '200':
ext_line = line.split('|')[3]
if '.' in ext_line:
# extensions should be like this regex.
clean_ext = search('[a-zA-Z0-9]+', \
ext_line.split('.')[-1])
# If regex returning None or a digit, do not add it.
if clean_ext is not None and \
clean_ext.group(0).isdigit() is not True:
# lower the extension.
ext_dict[clean_ext.group(0).lower()] += 1
pprint(sorted(((v,k) for k,v in ext_dict.iteritems()), reverse=True))
if __name__ == '__main__':
count_extension(argv[1])
<commit_msg>Replace double quote by simple quote, no need interpolation<commit_after>#! /usr/bin/env python2.7
from collections import Counter
from pprint import pprint
from re import search
from sys import argv
def count_extension(filename):
with open(filename, 'r') as file:
# New empty counter.
ext_dict = Counter()
for line in file:
# Remove newlines / carriage returns.
line = line.strip()
# Should be a non-empty line, with 200 OK and GET method.
if line and 'GET' in line and line.split('|')[13] == '200':
ext_line = line.split('|')[3]
if '.' in ext_line:
# extensions should be like this regex.
clean_ext = search('[a-zA-Z0-9]+', \
ext_line.split('.')[-1])
# If regex returning None or a digit, do not add it.
if clean_ext is not None and \
clean_ext.group(0).isdigit() is not True:
# lower the extension.
ext_dict[clean_ext.group(0).lower()] += 1
pprint(sorted(((v,k) for k,v in ext_dict.iteritems()), reverse=True))
if __name__ == '__main__':
count_extension(argv[1])
|
0d19453290043ecf96fe3219d00140146b699cb9
|
python/servo/devenv_commands.py
|
python/servo/devenv_commands.py
|
from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
@Command('cargo',
description='Run Cargo',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to Cervo")
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
|
from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
@Command('cargo',
description='Run Cargo',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to Cargo")
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
|
Fix typo in 'mach cargo --help'
|
Fix typo in 'mach cargo --help'
|
Python
|
mpl-2.0
|
codemac/servo,AnthonyBroadCrawford/servo,zhangjunlei26/servo,Shraddha512/servo,boghison/servo,CJ8664/servo,codemac/servo,ConnorGBrewster/servo,jlegendary/servo,rnestler/servo,notriddle/servo,upsuper/servo,snf/servo,Shraddha512/servo,mdibaiee/servo,samfoo/servo,dagnir/servo,pyfisch/servo,pgonda/servo,ConnorGBrewster/servo,snf/servo,aweinstock314/servo,deokjinkim/servo,jdramani/servo,evilpie/servo,bjwbell/servo,thiagopnts/servo,pyecs/servo,fiji-flo/servo,DominoTree/servo,michaelwu/servo,AnthonyBroadCrawford/servo,michaelwu/servo,meh/servo,bfrohs/servo,ryancanhelpyou/servo,cbrewster/servo,g-k/servo,dmarcos/servo,tschneidereit/servo,nrc/servo,wartman4404/servo,echochamber/servo,canaltinova/servo,tempbottle/servo,splav/servo,dati91/servo,indykish/servo,zentner-kyle/servo,thiagopnts/servo,wpgallih/servo,dati91/servo,jlegendary/servo,GyrosOfWar/servo,youprofit/servo,wpgallih/servo,peterjoel/servo,KiChjang/servo,youprofit/servo,SimonSapin/servo,szeged/servo,meh/servo,avadacatavra/servo,splav/servo,peterjoel/servo,rixrix/servo,saneyuki/servo,mattnenterprise/servo,jlegendary/servo,youprofit/servo,nnethercote/servo,bjwbell/servo,notriddle/servo,tempbottle/servo,shrenikgala/servo,deokjinkim/servo,CJ8664/servo,aidanhs/servo,nick-thompson/servo,DominoTree/servo,deokjinkim/servo,rixrix/servo,codemac/servo,nerith/servo,tschneidereit/servo,wartman4404/servo,fiji-flo/servo,rnestler/servo,g-k/servo,deokjinkim/servo,froydnj/servo,SimonSapin/servo,dmarcos/servo,szeged/servo,shrenikgala/servo,AnthonyBroadCrawford/servo,pyfisch/servo,snf/servo,tempbottle/servo,splav/servo,GyrosOfWar/servo,mbrubeck/servo,samfoo/servo,KiChjang/servo,larsbergstrom/servo,rixrix/servo,dmarcos/servo,Shraddha512/servo,szeged/servo,pgonda/servo,evilpie/servo,nrc/servo,notriddle/servo,echochamber/servo,ConnorGBrewster/servo,caldwell/servo,larsbergstrom/servo,upsuper/servo,zentner-kyle/servo,CJ8664/servo,KiChjang/servo,indykish/servo,zhangjunlei26/servo,sadmansk/servo,luniv/servo,cbrewster/servo,jlegendary/servo,sr
bhklkrn/SERVOENGINE,dati91/servo,huonw/servo,canaltinova/servo,jimberlage/servo,deokjinkim/servo,GreenRecycleBin/servo,splav/servo,codemac/servo,dati91/servo,brendandahl/servo,saneyuki/servo,shrenikgala/servo,mt2d2/servo,dhananjay92/servo,anthgur/servo,ruud-v-a/servo,zhangjunlei26/servo,juzer10/servo,dati91/servo,fiji-flo/servo,peterjoel/servo,bfrohs/servo,GreenRecycleBin/servo,vks/servo,caldwell/servo,avadacatavra/servo,dvberkel/servo,caldwell/servo,upsuper/servo,nerith/servo,huonw/servo,paulrouget/servo,larsbergstrom/servo,jlegendary/servo,snf/servo,steveklabnik/servo,zentner-kyle/servo,hyowon/servo,karlito40/servo,mattnenterprise/servo,mattnenterprise/servo,mbrubeck/servo,dvberkel/servo,A-deLuna/servo,j3parker/servo,peterjoel/servo,A-deLuna/servo,eddyb/servo,Shraddha512/servo,DominoTree/servo,jlegendary/servo,pgonda/servo,anthgur/servo,dvberkel/servo,luniv/servo,rnestler/servo,fiji-flo/servo,nick-thompson/servo,pgonda/servo,thiagopnts/servo,pyecs/servo,A-deLuna/servo,wpgallih/servo,KiChjang/servo,mbrubeck/servo,ryancanhelpyou/servo,g-k/servo,emilio/servo,jlegendary/servo,aidanhs/servo,srbhklkrn/SERVOENGINE,kindersung/servo,kindersung/servo,snf/servo,WriterOfAlicrow/servo,aidanhs/servo,tschneidereit/servo,karlito40/servo,g-k/servo,notriddle/servo,steveklabnik/servo,zhangjunlei26/servo,WriterOfAlicrow/servo,tschneidereit/servo,jgraham/servo,jimberlage/servo,juzer10/servo,bfrohs/servo,caldwell/servo,emilio/servo,GreenRecycleBin/servo,s142857/servo,huonw/servo,dagnir/servo,mdibaiee/servo,rixrix/servo,michaelwu/servo,steveklabnik/servo,indykish/servo,sadmansk/servo,peterjoel/servo,larsbergstrom/servo,jimberlage/servo,cbrewster/servo,splav/servo,pyfisch/servo,pyfisch/servo,nerith/servo,canaltinova/servo,indykish/servo,michaelwu/servo,mbrubeck/servo,vks/servo,upsuper/servo,dmarcos/servo,tafia/servo,pyecs/servo,runarberg/servo,vks/servo,splav/servo,notriddle/servo,pyfisch/servo,indykish/servo,upsuper/servo,KiChjang/servo,rnestler/servo,zhangjunlei26/servo,juzer10/servo,b
rendandahl/servo,rixrix/servo,tafia/servo,A-deLuna/servo,indykish/servo,dsandeephegde/servo,pyfisch/servo,chotchki/servo,hyowon/servo,Adenilson/prototype-viewing-distance,vks/servo,pyfisch/servo,upsuper/servo,jdramani/servo,cbrewster/servo,rentongzhang/servo,aweinstock314/servo,tempbottle/servo,peterjoel/servo,notriddle/servo,szeged/servo,jdramani/servo,mt2d2/servo,runarberg/servo,steveklabnik/servo,mattnenterprise/servo,huonw/servo,GreenRecycleBin/servo,CJ8664/servo,pyfisch/servo,nerith/servo,Adenilson/prototype-viewing-distance,aidanhs/servo,RenaudParis/servo,bjwbell/servo,chotchki/servo,cbrewster/servo,jgraham/servo,fiji-flo/servo,codemac/servo,WriterOfAlicrow/servo,avadacatavra/servo,thiagopnts/servo,saneyuki/servo,nrc/servo,runarberg/servo,walac/servo,walac/servo,eddyb/servo,youprofit/servo,mukilan/servo,RenaudParis/servo,juzer10/servo,s142857/servo,rixrix/servo,Shraddha512/servo,mukilan/servo,saratang/servo,indykish/servo,paulrouget/servo,bfrohs/servo,akosel/servo,mdibaiee/servo,cbrewster/servo,akosel/servo,vks/servo,meh/servo,SimonSapin/servo,nick-thompson/servo,wartman4404/servo,fiji-flo/servo,walac/servo,codemac/servo,cbrewster/servo,AnthonyBroadCrawford/servo,sadmansk/servo,jgraham/servo,ConnorGBrewster/servo,dati91/servo,samfoo/servo,dagnir/servo,evilpie/servo,nnethercote/servo,emilio/servo,aidanhs/servo,pgonda/servo,rnestler/servo,samfoo/servo,Adenilson/prototype-viewing-distance,ruud-v-a/servo,szeged/servo,RenaudParis/servo,mdibaiee/servo,nnethercote/servo,pyfisch/servo,wpgallih/servo,CJ8664/servo,DominoTree/servo,szeged/servo,rixrix/servo,CJ8664/servo,bfrohs/servo,splav/servo,avadacatavra/servo,kindersung/servo,sadmansk/servo,dhananjay92/servo,Shraddha512/servo,jgraham/servo,kindersung/servo,GreenRecycleBin/servo,brendandahl/servo,rnestler/servo,notriddle/servo,anthgur/servo,SimonSapin/servo,GreenRecycleBin/servo,pyecs/servo,pyecs/servo,eddyb/servo,srbhklkrn/SERVOENGINE,sadmansk/servo,rentongzhang/servo,shrenikgala/servo,pyecs/servo,mattnenterprise/ser
vo,WriterOfAlicrow/servo,eddyb/servo,boghison/servo,mdibaiee/servo,aweinstock314/servo,dati91/servo,eddyb/servo,dvberkel/servo,jdramani/servo,larsbergstrom/servo,michaelwu/servo,nnethercote/servo,bjwbell/servo,froydnj/servo,GreenRecycleBin/servo,wartman4404/servo,KiChjang/servo,luniv/servo,RenaudParis/servo,hyowon/servo,sadmansk/servo,luniv/servo,CJ8664/servo,mattnenterprise/servo,karlito40/servo,ruud-v-a/servo,dsandeephegde/servo,juzer10/servo,mattnenterprise/servo,AnthonyBroadCrawford/servo,tschneidereit/servo,hyowon/servo,notriddle/servo,emilio/servo,vks/servo,jdramani/servo,avadacatavra/servo,tafia/servo,caldwell/servo,youprofit/servo,mt2d2/servo,dvberkel/servo,dhananjay92/servo,evilpie/servo,mukilan/servo,rixrix/servo,larsbergstrom/servo,paulrouget/servo,evilpie/servo,jgraham/servo,michaelwu/servo,paulrouget/servo,avadacatavra/servo,peterjoel/servo,tafia/servo,thiagopnts/servo,zhangjunlei26/servo,j3parker/servo,eddyb/servo,snf/servo,luniv/servo,rentongzhang/servo,mbrubeck/servo,akosel/servo,dagnir/servo,s142857/servo,dhananjay92/servo,paulrouget/servo,nnethercote/servo,splav/servo,saneyuki/servo,KiChjang/servo,AnthonyBroadCrawford/servo,SimonSapin/servo,meh/servo,ConnorGBrewster/servo,rentongzhang/servo,luniv/servo,youprofit/servo,j3parker/servo,bjwbell/servo,zentner-kyle/servo,karlito40/servo,saratang/servo,szeged/servo,evilpie/servo,saneyuki/servo,mt2d2/servo,youprofit/servo,rentongzhang/servo,samfoo/servo,mattnenterprise/servo,echochamber/servo,avadacatavra/servo,saneyuki/servo,upsuper/servo,karlito40/servo,Adenilson/prototype-viewing-distance,fiji-flo/servo,saneyuki/servo,j3parker/servo,wartman4404/servo,thiagopnts/servo,peterjoel/servo,szeged/servo,GyrosOfWar/servo,meh/servo,DominoTree/servo,wpgallih/servo,notriddle/servo,nick-thompson/servo,froydnj/servo,emilio/servo,pyfisch/servo,avadacatavra/servo,shrenikgala/servo,mt2d2/servo,aidanhs/servo,WriterOfAlicrow/servo,j3parker/servo,GyrosOfWar/servo,echochamber/servo,tempbottle/servo,jimberlage/servo,WriterOf
Alicrow/servo,DominoTree/servo,aidanhs/servo,ryancanhelpyou/servo,nnethercote/servo,zhangjunlei26/servo,fiji-flo/servo,nick-thompson/servo,nerith/servo,rentongzhang/servo,g-k/servo,RenaudParis/servo,paulrouget/servo,hyowon/servo,larsbergstrom/servo,shrenikgala/servo,notriddle/servo,wpgallih/servo,mbrubeck/servo,emilio/servo,huonw/servo,zhangjunlei26/servo,juzer10/servo,huonw/servo,emilio/servo,larsbergstrom/servo,saneyuki/servo,canaltinova/servo,deokjinkim/servo,nnethercote/servo,szeged/servo,SimonSapin/servo,dsandeephegde/servo,runarberg/servo,KiChjang/servo,AnthonyBroadCrawford/servo,mt2d2/servo,j3parker/servo,g-k/servo,j3parker/servo,mdibaiee/servo,ConnorGBrewster/servo,bfrohs/servo,thiagopnts/servo,dagnir/servo,rixrix/servo,anthgur/servo,emilio/servo,brendandahl/servo,evilpie/servo,ruud-v-a/servo,srbhklkrn/SERVOENGINE,boghison/servo,brendandahl/servo,larsbergstrom/servo,dsandeephegde/servo,runarberg/servo,dsandeephegde/servo,samfoo/servo,indykish/servo,codemac/servo,nrc/servo,paulrouget/servo,CJ8664/servo,jdramani/servo,mbrubeck/servo,dmarcos/servo,ryancanhelpyou/servo,luniv/servo,srbhklkrn/SERVOENGINE,froydnj/servo,SimonSapin/servo,KiChjang/servo,aweinstock314/servo,sadmansk/servo,nerith/servo,dhananjay92/servo,saneyuki/servo,dagnir/servo,mukilan/servo,splav/servo,paulrouget/servo,walac/servo,snf/servo,shrenikgala/servo,zentner-kyle/servo,dmarcos/servo,jimberlage/servo,nrc/servo,wartman4404/servo,tempbottle/servo,szeged/servo,steveklabnik/servo,splav/servo,DominoTree/servo,ryancanhelpyou/servo,GreenRecycleBin/servo,Adenilson/prototype-viewing-distance,rnestler/servo,SimonSapin/servo,WriterOfAlicrow/servo,caldwell/servo,aweinstock314/servo,deokjinkim/servo,saratang/servo,Adenilson/prototype-viewing-distance,jdramani/servo,boghison/servo,kindersung/servo,saratang/servo,evilpie/servo,steveklabnik/servo,boghison/servo,karlito40/servo,dsandeephegde/servo,anthgur/servo,steveklabnik/servo,ruud-v-a/servo,pgonda/servo,DominoTree/servo,chotchki/servo,huonw/servo,dagnir/s
ervo,zentner-kyle/servo,akosel/servo,runarberg/servo,larsbergstrom/servo,zentner-kyle/servo,RenaudParis/servo,pyecs/servo,tafia/servo,nick-thompson/servo,dhananjay92/servo,A-deLuna/servo,evilpie/servo,Adenilson/prototype-viewing-distance,dvberkel/servo,srbhklkrn/SERVOENGINE,mbrubeck/servo,thiagopnts/servo,sadmansk/servo,emilio/servo,bjwbell/servo,jimberlage/servo,Shraddha512/servo,kindersung/servo,boghison/servo,eddyb/servo,wpgallih/servo,akosel/servo,upsuper/servo,walac/servo,froydnj/servo,aweinstock314/servo,akosel/servo,nerith/servo,A-deLuna/servo,ConnorGBrewster/servo,rentongzhang/servo,hyowon/servo,s142857/servo,peterjoel/servo,mukilan/servo,canaltinova/servo,DominoTree/servo,KiChjang/servo,nick-thompson/servo,boghison/servo,dsandeephegde/servo,dmarcos/servo,tschneidereit/servo,ConnorGBrewster/servo,s142857/servo,brendandahl/servo,nnethercote/servo,ryancanhelpyou/servo,dhananjay92/servo,echochamber/servo,saratang/servo,ruud-v-a/servo,ryancanhelpyou/servo,DominoTree/servo,samfoo/servo,echochamber/servo,peterjoel/servo,cbrewster/servo,anthgur/servo,wpgallih/servo,nrc/servo,chotchki/servo,jimberlage/servo,mdibaiee/servo,GyrosOfWar/servo,nnethercote/servo,paulrouget/servo,eddyb/servo,paulrouget/servo,A-deLuna/servo,hyowon/servo,anthgur/servo,meh/servo,g-k/servo,dati91/servo,mt2d2/servo,walac/servo,srbhklkrn/SERVOENGINE,froydnj/servo,jimberlage/servo,wartman4404/servo,canaltinova/servo,jimberlage/servo,canaltinova/servo,saratang/servo,indykish/servo,chotchki/servo,saratang/servo,RenaudParis/servo,brendandahl/servo,mukilan/servo,dvberkel/servo,kindersung/servo,zhangjunlei26/servo,tafia/servo,GreenRecycleBin/servo,emilio/servo,walac/servo,juzer10/servo,jgraham/servo,saneyuki/servo,wpgallih/servo,GyrosOfWar/servo,GyrosOfWar/servo,s142857/servo,akosel/servo,vks/servo,tempbottle/servo,dsandeephegde/servo,echochamber/servo,karlito40/servo,aweinstock314/servo,nnethercote/servo,pgonda/servo,canaltinova/servo,s142857/servo,rnestler/servo,tafia/servo,runarberg/servo,meh/servo
,bjwbell/servo,jgraham/servo,caldwell/servo,tschneidereit/servo,nrc/servo,chotchki/servo,anthgur/servo
|
from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
    """Mach commands for invoking the Cargo build tool."""

    @Command('cargo',
             description='Run Cargo',
             category='devenv',
             allow_all_args=True)
    @CommandArgument('params', default=None, nargs='...',
                     # Fix: help text said "Cervo"; the args are forwarded to Cargo.
                     help="Command-line arguments to be passed through to Cargo")
    def run(self, params):
        """Run ``cargo`` with *params* in the servo build environment.

        Returns the subprocess exit status (0 on success).
        """
        return subprocess.call(["cargo"] + params,
                               env=self.build_env())
Fix typo in 'mach cargo --help'
|
from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
    """Mach commands for invoking the Cargo build tool."""

    # Registers `mach cargo`; allow_all_args forwards everything verbatim.
    @Command('cargo',
             description='Run Cargo',
             category='devenv',
             allow_all_args=True)
    @CommandArgument('params', default=None, nargs='...',
                     help="Command-line arguments to be passed through to Cargo")
    def run(self, params):
        """Run ``cargo`` with *params* in the servo build environment.

        Returns the subprocess exit status (0 on success).
        """
        return subprocess.call(["cargo"] + params,
                               env=self.build_env())
|
<commit_before>from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
@Command('cargo',
description='Run Cargo',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to Cervo")
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
<commit_msg>Fix typo in 'mach cargo --help'<commit_after>
|
from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
@Command('cargo',
description='Run Cargo',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to Cargo")
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
|
from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
@Command('cargo',
description='Run Cargo',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to Cervo")
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
Fix typo in 'mach cargo --help'from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
@Command('cargo',
description='Run Cargo',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to Cargo")
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
|
<commit_before>from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
@Command('cargo',
description='Run Cargo',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to Cervo")
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
<commit_msg>Fix typo in 'mach cargo --help'<commit_after>from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
@Command('cargo',
description='Run Cargo',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to Cargo")
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
|
b7a80f92b4e2e7227efe5712e512f5a75bc4e67c
|
locales/seattle/librenms.py
|
locales/seattle/librenms.py
|
#!/usr/bin/env python
import json
import urllib2
librenms = json.loads(
urllib2.urlopen(urllib2.Request(
'https://librenms.hamwan.org/api/v0/devices',
headers={'X-Auth-Token': '600dc6857a6e2bf200b46e56b78c0049'},
)).read()
)
inventory = {
"_meta": {
"hostvars": {}
}
}
for key in ('os', 'sysName', 'type', 'version'):
for device in librenms['devices']:
group = device.get(key)
if not group:
continue
if not inventory.get(group):
inventory[group] = []
inventory[group].append(device['hostname'])
# converts the 'status' field to an 'available' list
inventory['available'] = [device['hostname'] for device in librenms['devices']
if int(device.get('status'))]
print json.dumps(inventory, indent=2)
|
#!/usr/bin/env python
# Ansible dynamic inventory script: builds host groups from the LibreNMS
# devices API (Python 2 -- uses urllib2 and the print statement).
import json
import urllib2

# Fetch the full device list; the X-Auth-Token header authenticates the call.
librenms = json.loads(
    urllib2.urlopen(urllib2.Request(
        'https://librenms.hamwan.org/api/v0/devices',
        headers={'X-Auth-Token': '600dc6857a6e2bf200b46e56b78c0049'},
    )).read()
)

inventory = {
    "_meta": {
        # No per-host variables are supplied; Ansible still requires the key.
        "hostvars": {}
    },
    # Every device is placed in "all" and in the site-wide "seattle" group so
    # that seattle group_vars apply across the inventory.
    "all": [device['hostname'] for device in librenms['devices']],
    "seattle": [device['hostname'] for device in librenms['devices']],
}

# Build one group per distinct value of each interesting device attribute.
for key in ('os', 'sysName', 'type', 'version'):
    for device in librenms['devices']:
        group = device.get(key)
        if not group:
            continue
        if not inventory.get(group):
            inventory[group] = []
        inventory[group].append(device['hostname'])

# converts the 'status' field to an 'available' list
inventory['available'] = [device['hostname'] for device in librenms['devices']
                          if int(device.get('status'))]

print json.dumps(inventory, indent=2)
|
Add seattle group to LibreNMS dynamic inventory to trigger seattle group vars.
|
Add seattle group to LibreNMS dynamic inventory to trigger seattle group vars.
|
Python
|
apache-2.0
|
HamWAN/infrastructure-configs,HamWAN/infrastructure-configs,HamWAN/infrastructure-configs
|
#!/usr/bin/env python
import json
import urllib2
librenms = json.loads(
urllib2.urlopen(urllib2.Request(
'https://librenms.hamwan.org/api/v0/devices',
headers={'X-Auth-Token': '600dc6857a6e2bf200b46e56b78c0049'},
)).read()
)
inventory = {
"_meta": {
"hostvars": {}
}
}
for key in ('os', 'sysName', 'type', 'version'):
for device in librenms['devices']:
group = device.get(key)
if not group:
continue
if not inventory.get(group):
inventory[group] = []
inventory[group].append(device['hostname'])
# converts the 'status' field to an 'available' list
inventory['available'] = [device['hostname'] for device in librenms['devices']
if int(device.get('status'))]
print json.dumps(inventory, indent=2)
Add seattle group to LibreNMS dynamic inventory to trigger seattle group vars.
|
#!/usr/bin/env python
import json
import urllib2
librenms = json.loads(
urllib2.urlopen(urllib2.Request(
'https://librenms.hamwan.org/api/v0/devices',
headers={'X-Auth-Token': '600dc6857a6e2bf200b46e56b78c0049'},
)).read()
)
inventory = {
"_meta": {
"hostvars": {}
},
"all": [device['hostname'] for device in librenms['devices']],
"seattle": [device['hostname'] for device in librenms['devices']],
}
for key in ('os', 'sysName', 'type', 'version'):
for device in librenms['devices']:
group = device.get(key)
if not group:
continue
if not inventory.get(group):
inventory[group] = []
inventory[group].append(device['hostname'])
# converts the 'status' field to an 'available' list
inventory['available'] = [device['hostname'] for device in librenms['devices']
if int(device.get('status'))]
print json.dumps(inventory, indent=2)
|
<commit_before>#!/usr/bin/env python
import json
import urllib2
librenms = json.loads(
urllib2.urlopen(urllib2.Request(
'https://librenms.hamwan.org/api/v0/devices',
headers={'X-Auth-Token': '600dc6857a6e2bf200b46e56b78c0049'},
)).read()
)
inventory = {
"_meta": {
"hostvars": {}
}
}
for key in ('os', 'sysName', 'type', 'version'):
for device in librenms['devices']:
group = device.get(key)
if not group:
continue
if not inventory.get(group):
inventory[group] = []
inventory[group].append(device['hostname'])
# converts the 'status' field to an 'available' list
inventory['available'] = [device['hostname'] for device in librenms['devices']
if int(device.get('status'))]
print json.dumps(inventory, indent=2)
<commit_msg>Add seattle group to LibreNMS dynamic inventory to trigger seattle group vars.<commit_after>
|
#!/usr/bin/env python
import json
import urllib2
librenms = json.loads(
urllib2.urlopen(urllib2.Request(
'https://librenms.hamwan.org/api/v0/devices',
headers={'X-Auth-Token': '600dc6857a6e2bf200b46e56b78c0049'},
)).read()
)
inventory = {
"_meta": {
"hostvars": {}
},
"all": [device['hostname'] for device in librenms['devices']],
"seattle": [device['hostname'] for device in librenms['devices']],
}
for key in ('os', 'sysName', 'type', 'version'):
for device in librenms['devices']:
group = device.get(key)
if not group:
continue
if not inventory.get(group):
inventory[group] = []
inventory[group].append(device['hostname'])
# converts the 'status' field to an 'available' list
inventory['available'] = [device['hostname'] for device in librenms['devices']
if int(device.get('status'))]
print json.dumps(inventory, indent=2)
|
#!/usr/bin/env python
import json
import urllib2
librenms = json.loads(
urllib2.urlopen(urllib2.Request(
'https://librenms.hamwan.org/api/v0/devices',
headers={'X-Auth-Token': '600dc6857a6e2bf200b46e56b78c0049'},
)).read()
)
inventory = {
"_meta": {
"hostvars": {}
}
}
for key in ('os', 'sysName', 'type', 'version'):
for device in librenms['devices']:
group = device.get(key)
if not group:
continue
if not inventory.get(group):
inventory[group] = []
inventory[group].append(device['hostname'])
# converts the 'status' field to an 'available' list
inventory['available'] = [device['hostname'] for device in librenms['devices']
if int(device.get('status'))]
print json.dumps(inventory, indent=2)
Add seattle group to LibreNMS dynamic inventory to trigger seattle group vars.#!/usr/bin/env python
import json
import urllib2
librenms = json.loads(
urllib2.urlopen(urllib2.Request(
'https://librenms.hamwan.org/api/v0/devices',
headers={'X-Auth-Token': '600dc6857a6e2bf200b46e56b78c0049'},
)).read()
)
inventory = {
"_meta": {
"hostvars": {}
},
"all": [device['hostname'] for device in librenms['devices']],
"seattle": [device['hostname'] for device in librenms['devices']],
}
for key in ('os', 'sysName', 'type', 'version'):
for device in librenms['devices']:
group = device.get(key)
if not group:
continue
if not inventory.get(group):
inventory[group] = []
inventory[group].append(device['hostname'])
# converts the 'status' field to an 'available' list
inventory['available'] = [device['hostname'] for device in librenms['devices']
if int(device.get('status'))]
print json.dumps(inventory, indent=2)
|
<commit_before>#!/usr/bin/env python
import json
import urllib2
librenms = json.loads(
urllib2.urlopen(urllib2.Request(
'https://librenms.hamwan.org/api/v0/devices',
headers={'X-Auth-Token': '600dc6857a6e2bf200b46e56b78c0049'},
)).read()
)
inventory = {
"_meta": {
"hostvars": {}
}
}
for key in ('os', 'sysName', 'type', 'version'):
for device in librenms['devices']:
group = device.get(key)
if not group:
continue
if not inventory.get(group):
inventory[group] = []
inventory[group].append(device['hostname'])
# converts the 'status' field to an 'available' list
inventory['available'] = [device['hostname'] for device in librenms['devices']
if int(device.get('status'))]
print json.dumps(inventory, indent=2)
<commit_msg>Add seattle group to LibreNMS dynamic inventory to trigger seattle group vars.<commit_after>#!/usr/bin/env python
import json
import urllib2
librenms = json.loads(
urllib2.urlopen(urllib2.Request(
'https://librenms.hamwan.org/api/v0/devices',
headers={'X-Auth-Token': '600dc6857a6e2bf200b46e56b78c0049'},
)).read()
)
inventory = {
"_meta": {
"hostvars": {}
},
"all": [device['hostname'] for device in librenms['devices']],
"seattle": [device['hostname'] for device in librenms['devices']],
}
for key in ('os', 'sysName', 'type', 'version'):
for device in librenms['devices']:
group = device.get(key)
if not group:
continue
if not inventory.get(group):
inventory[group] = []
inventory[group].append(device['hostname'])
# converts the 'status' field to an 'available' list
inventory['available'] = [device['hostname'] for device in librenms['devices']
if int(device.get('status'))]
print json.dumps(inventory, indent=2)
|
46b8a4d0668c764df85f1e8a94672d81dd112beb
|
maps/api/views.py
|
maps/api/views.py
|
from django.http import HttpResponse
def list_question_sets(request):
    # Placeholder endpoint; serves a fixed body until the real listing lands.
    body = 'Lol, udachi'
    return HttpResponse(body)
|
import json
from django.http import HttpResponse
from maps.models import QuestionSet
def list_question_sets(request):
    """Serialize every QuestionSet into a JSON array of summary dicts."""
    payload = [
        {
            'title': question_set.title,
            'max_duration': question_set.max_duration.seconds,
            'creator': {
                'full_name': question_set.creator.get_full_name()
            },
        }
        for question_set in QuestionSet.objects.all()
    ]
    return HttpResponse(json.dumps(payload))
|
Add API method for question sets list
|
Add API method for question sets list
|
Python
|
mit
|
sevazhidkov/greenland,sevazhidkov/greenland
|
from django.http import HttpResponse
def list_question_sets(request):
return HttpResponse('Lol, udachi')
Add API method for question sets list
|
import json
from django.http import HttpResponse
from maps.models import QuestionSet
def list_question_sets(request):
objects = QuestionSet.objects.all()
items = []
for obj in objects:
items.append({
'title': obj.title,
'max_duration': obj.max_duration.seconds,
'creator': {
'full_name': obj.creator.get_full_name()
}
})
return HttpResponse(json.dumps(items))
|
<commit_before>from django.http import HttpResponse
def list_question_sets(request):
return HttpResponse('Lol, udachi')
<commit_msg>Add API method for question sets list<commit_after>
|
import json
from django.http import HttpResponse
from maps.models import QuestionSet
def list_question_sets(request):
objects = QuestionSet.objects.all()
items = []
for obj in objects:
items.append({
'title': obj.title,
'max_duration': obj.max_duration.seconds,
'creator': {
'full_name': obj.creator.get_full_name()
}
})
return HttpResponse(json.dumps(items))
|
from django.http import HttpResponse
def list_question_sets(request):
return HttpResponse('Lol, udachi')
Add API method for question sets listimport json
from django.http import HttpResponse
from maps.models import QuestionSet
def list_question_sets(request):
objects = QuestionSet.objects.all()
items = []
for obj in objects:
items.append({
'title': obj.title,
'max_duration': obj.max_duration.seconds,
'creator': {
'full_name': obj.creator.get_full_name()
}
})
return HttpResponse(json.dumps(items))
|
<commit_before>from django.http import HttpResponse
def list_question_sets(request):
return HttpResponse('Lol, udachi')
<commit_msg>Add API method for question sets list<commit_after>import json
from django.http import HttpResponse
from maps.models import QuestionSet
def list_question_sets(request):
objects = QuestionSet.objects.all()
items = []
for obj in objects:
items.append({
'title': obj.title,
'max_duration': obj.max_duration.seconds,
'creator': {
'full_name': obj.creator.get_full_name()
}
})
return HttpResponse(json.dumps(items))
|
3152ee5ca2f21708e428faac5eaadbb403d0a1dc
|
spacy/tests/serialize/test_serialize_tokenizer.py
|
spacy/tests/serialize/test_serialize_tokenizer.py
|
# coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
import pytest
@pytest.mark.parametrize('text', ["I can't do this"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
    """A tokenizer restored from its byte serialization must be equivalent."""
    tokenizer_b = en_tokenizer.to_bytes()
    new_tokenizer = en_tokenizer.from_bytes(tokenizer_b)
    # Round-trip must be lossless: re-serializing yields identical bytes.
    assert new_tokenizer.to_bytes() == tokenizer_b
    doc1 = en_tokenizer(text)
    doc2 = new_tokenizer(text)
    # The restored tokenizer must segment the text exactly the same way.
    assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
    """A tokenizer written to disk and read back serializes identically."""
    tokenizer = en_tokenizer
    with make_tempdir() as d:
        file_path = d / 'tokenizer'
        tokenizer.to_disk(file_path)
        tokenizer_d = en_tokenizer.from_disk(file_path)
        # Byte-level equality of serializations implies equivalent state.
        assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
|
# coding: utf-8
from __future__ import unicode_literals
from ...util import get_lang_class
from ..util import make_tempdir, assert_packed_msg_equal
import pytest
def load_tokenizer(b):
    """Create a fresh English tokenizer and restore its state from *b*."""
    tokenizer = get_lang_class('en').Defaults.create_tokenizer()
    tokenizer.from_bytes(b)
    return tokenizer
@pytest.mark.parametrize('text', ["I💜you", "they’re", "“hello”"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer = en_tokenizer
new_tokenizer = load_tokenizer(tokenizer.to_bytes())
assert_packed_msg_equal(new_tokenizer.to_bytes(), tokenizer.to_bytes())
# assert new_tokenizer.to_bytes() == tokenizer.to_bytes()
doc1 = tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
|
Update serialization tests for tokenizer
|
Update serialization tests for tokenizer
|
Python
|
mit
|
honnibal/spaCy,honnibal/spaCy,honnibal/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,spacy-io/spaCy,aikramer2/spaCy,recognai/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy
|
# coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
import pytest
@pytest.mark.parametrize('text', ["I can't do this"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer_b = en_tokenizer.to_bytes()
new_tokenizer = en_tokenizer.from_bytes(tokenizer_b)
assert new_tokenizer.to_bytes() == tokenizer_b
doc1 = en_tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
Update serialization tests for tokenizer
|
# coding: utf-8
from __future__ import unicode_literals
from ...util import get_lang_class
from ..util import make_tempdir, assert_packed_msg_equal
import pytest
def load_tokenizer(b):
tok = get_lang_class('en').Defaults.create_tokenizer()
tok.from_bytes(b)
return tok
@pytest.mark.parametrize('text', ["I💜you", "they’re", "“hello”"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer = en_tokenizer
new_tokenizer = load_tokenizer(tokenizer.to_bytes())
assert_packed_msg_equal(new_tokenizer.to_bytes(), tokenizer.to_bytes())
# assert new_tokenizer.to_bytes() == tokenizer.to_bytes()
doc1 = tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
import pytest
@pytest.mark.parametrize('text', ["I can't do this"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer_b = en_tokenizer.to_bytes()
new_tokenizer = en_tokenizer.from_bytes(tokenizer_b)
assert new_tokenizer.to_bytes() == tokenizer_b
doc1 = en_tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
<commit_msg>Update serialization tests for tokenizer<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
from ...util import get_lang_class
from ..util import make_tempdir, assert_packed_msg_equal
import pytest
def load_tokenizer(b):
tok = get_lang_class('en').Defaults.create_tokenizer()
tok.from_bytes(b)
return tok
@pytest.mark.parametrize('text', ["I💜you", "they’re", "“hello”"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer = en_tokenizer
new_tokenizer = load_tokenizer(tokenizer.to_bytes())
assert_packed_msg_equal(new_tokenizer.to_bytes(), tokenizer.to_bytes())
# assert new_tokenizer.to_bytes() == tokenizer.to_bytes()
doc1 = tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
|
# coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
import pytest
@pytest.mark.parametrize('text', ["I can't do this"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer_b = en_tokenizer.to_bytes()
new_tokenizer = en_tokenizer.from_bytes(tokenizer_b)
assert new_tokenizer.to_bytes() == tokenizer_b
doc1 = en_tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
Update serialization tests for tokenizer# coding: utf-8
from __future__ import unicode_literals
from ...util import get_lang_class
from ..util import make_tempdir, assert_packed_msg_equal
import pytest
def load_tokenizer(b):
tok = get_lang_class('en').Defaults.create_tokenizer()
tok.from_bytes(b)
return tok
@pytest.mark.parametrize('text', ["I💜you", "they’re", "“hello”"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer = en_tokenizer
new_tokenizer = load_tokenizer(tokenizer.to_bytes())
assert_packed_msg_equal(new_tokenizer.to_bytes(), tokenizer.to_bytes())
# assert new_tokenizer.to_bytes() == tokenizer.to_bytes()
doc1 = tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
|
<commit_before># coding: utf-8
from __future__ import unicode_literals
from ..util import make_tempdir
import pytest
@pytest.mark.parametrize('text', ["I can't do this"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer_b = en_tokenizer.to_bytes()
new_tokenizer = en_tokenizer.from_bytes(tokenizer_b)
assert new_tokenizer.to_bytes() == tokenizer_b
doc1 = en_tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
<commit_msg>Update serialization tests for tokenizer<commit_after># coding: utf-8
from __future__ import unicode_literals
from ...util import get_lang_class
from ..util import make_tempdir, assert_packed_msg_equal
import pytest
def load_tokenizer(b):
tok = get_lang_class('en').Defaults.create_tokenizer()
tok.from_bytes(b)
return tok
@pytest.mark.parametrize('text', ["I💜you", "they’re", "“hello”"])
def test_serialize_tokenizer_roundtrip_bytes(en_tokenizer, text):
tokenizer = en_tokenizer
new_tokenizer = load_tokenizer(tokenizer.to_bytes())
assert_packed_msg_equal(new_tokenizer.to_bytes(), tokenizer.to_bytes())
# assert new_tokenizer.to_bytes() == tokenizer.to_bytes()
doc1 = tokenizer(text)
doc2 = new_tokenizer(text)
assert [token.text for token in doc1] == [token.text for token in doc2]
def test_serialize_tokenizer_roundtrip_disk(en_tokenizer):
tokenizer = en_tokenizer
with make_tempdir() as d:
file_path = d / 'tokenizer'
tokenizer.to_disk(file_path)
tokenizer_d = en_tokenizer.from_disk(file_path)
assert tokenizer.to_bytes() == tokenizer_d.to_bytes()
|
f8290954b27e655562878d16df7e4793262f50d7
|
wafer/tickets/management/commands/import_quicket_guest_list.py
|
wafer/tickets/management/commands/import_quicket_guest_list.py
|
import csv
from django.core.management.base import BaseCommand, CommandError
from wafer.tickets.views import import_ticket
class Command(BaseCommand):
    """Import a Quicket guest-list CSV, creating one ticket per data row."""
    args = '<csv file>'
    help = "Import a guest list CSV from Quicket"

    #: Exact header Quicket emits. Checked verbatim so a silent format
    #: change (reordered or renamed columns) fails loudly instead of
    #: importing garbage rows.
    EXPECTED_HEADER = ['Ticket Number', 'Ticket Barcode', 'Purchase Date',
                       'Ticket Type', 'Ticket Holder', 'Email', 'Cellphone',
                       'Checked in', 'Checked in date', 'Checked in by',
                       'Complimentary']

    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError('1 CSV File required')
        with open(args[0], 'r') as f:
            reader = csv.reader(f)
            header = next(reader)
            # BUG FIX: previously only the column COUNT was checked, so any
            # 11-column CSV -- even with reordered or renamed columns --
            # passed validation and mapped fields to the wrong arguments.
            if header != self.EXPECTED_HEADER:
                raise CommandError('CSV format has changed. Update wafer')
            for ticket in reader:
                self.import_ticket(*ticket)

    def import_ticket(self, ticket_number, ticket_barcode, purchase_date,
            ticket_type, ticket_holder, email, cellphone, checked_in,
            checked_in_date, checked_in_by, complimentary):
        """Create one ticket from a CSV row; only barcode, type and email are used."""
        import_ticket(ticket_barcode, ticket_type, email)
|
import csv
from django.core.management.base import BaseCommand, CommandError
from wafer.tickets.views import import_ticket
class Command(BaseCommand):
    """Import a Quicket guest-list CSV, creating one ticket per data row."""
    args = '<csv file>'
    help = "Import a guest list CSV from Quicket"

    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError('1 CSV File required')
        expected = ('Ticket Number', 'Ticket Barcode', 'Purchase Date',
                    'Ticket Type', 'Ticket Holder', 'Email', 'Cellphone',
                    'Checked in', 'Checked in date', 'Checked in by',
                    'Complimentary')
        # Dict keys derived from the column names: lower-case, underscores.
        field_names = [name.lower().replace(' ', '_') for name in expected]
        with open(args[0], 'r') as csv_file:
            rows = csv.reader(csv_file)
            # Compare the full header so a reordered/renamed CSV is rejected.
            if tuple(next(rows)) != expected:
                raise CommandError('CSV format has changed. Update wafer')
            for values in rows:
                record = dict(zip(field_names, values))
                import_ticket(record['ticket_barcode'],
                              record['ticket_type'],
                              record['email'])
|
Check CSV header, not column count (and refactor)
|
Check CSV header, not column count (and refactor)
|
Python
|
isc
|
CarlFK/wafer,CarlFK/wafer,CarlFK/wafer,CarlFK/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
|
import csv
from django.core.management.base import BaseCommand, CommandError
from wafer.tickets.views import import_ticket
class Command(BaseCommand):
args = '<csv file>'
help = "Import a guest list CSV from Quicket"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError('1 CSV File required')
with open(args[0], 'r') as f:
reader = csv.reader(f)
header = next(reader)
if len(header) != 11:
raise CommandError('CSV format has changed. Update wafer')
for ticket in reader:
self.import_ticket(*ticket)
def import_ticket(self, ticket_number, ticket_barcode, purchase_date,
ticket_type, ticket_holder, email, cellphone, checked_in,
checked_in_date, checked_in_by, complimentary):
import_ticket(ticket_barcode, ticket_type, email)
Check CSV header, not column count (and refactor)
|
import csv
from django.core.management.base import BaseCommand, CommandError
from wafer.tickets.views import import_ticket
class Command(BaseCommand):
args = '<csv file>'
help = "Import a guest list CSV from Quicket"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError('1 CSV File required')
columns = ('Ticket Number', 'Ticket Barcode', 'Purchase Date',
'Ticket Type', 'Ticket Holder', 'Email', 'Cellphone',
'Checked in', 'Checked in date', 'Checked in by',
'Complimentary')
keys = [column.lower().replace(' ', '_') for column in columns]
with open(args[0], 'r') as f:
reader = csv.reader(f)
header = tuple(next(reader))
if header != columns:
raise CommandError('CSV format has changed. Update wafer')
for row in reader:
ticket = dict(zip(keys, row))
import_ticket(ticket['ticket_barcode'],
ticket['ticket_type'],
ticket['email'])
|
<commit_before>import csv
from django.core.management.base import BaseCommand, CommandError
from wafer.tickets.views import import_ticket
class Command(BaseCommand):
args = '<csv file>'
help = "Import a guest list CSV from Quicket"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError('1 CSV File required')
with open(args[0], 'r') as f:
reader = csv.reader(f)
header = next(reader)
if len(header) != 11:
raise CommandError('CSV format has changed. Update wafer')
for ticket in reader:
self.import_ticket(*ticket)
def import_ticket(self, ticket_number, ticket_barcode, purchase_date,
ticket_type, ticket_holder, email, cellphone, checked_in,
checked_in_date, checked_in_by, complimentary):
import_ticket(ticket_barcode, ticket_type, email)
<commit_msg>Check CSV header, not column count (and refactor)<commit_after>
|
import csv
from django.core.management.base import BaseCommand, CommandError
from wafer.tickets.views import import_ticket
class Command(BaseCommand):
args = '<csv file>'
help = "Import a guest list CSV from Quicket"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError('1 CSV File required')
columns = ('Ticket Number', 'Ticket Barcode', 'Purchase Date',
'Ticket Type', 'Ticket Holder', 'Email', 'Cellphone',
'Checked in', 'Checked in date', 'Checked in by',
'Complimentary')
keys = [column.lower().replace(' ', '_') for column in columns]
with open(args[0], 'r') as f:
reader = csv.reader(f)
header = tuple(next(reader))
if header != columns:
raise CommandError('CSV format has changed. Update wafer')
for row in reader:
ticket = dict(zip(keys, row))
import_ticket(ticket['ticket_barcode'],
ticket['ticket_type'],
ticket['email'])
|
import csv
from django.core.management.base import BaseCommand, CommandError
from wafer.tickets.views import import_ticket
class Command(BaseCommand):
args = '<csv file>'
help = "Import a guest list CSV from Quicket"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError('1 CSV File required')
with open(args[0], 'r') as f:
reader = csv.reader(f)
header = next(reader)
if len(header) != 11:
raise CommandError('CSV format has changed. Update wafer')
for ticket in reader:
self.import_ticket(*ticket)
def import_ticket(self, ticket_number, ticket_barcode, purchase_date,
ticket_type, ticket_holder, email, cellphone, checked_in,
checked_in_date, checked_in_by, complimentary):
import_ticket(ticket_barcode, ticket_type, email)
Check CSV header, not column count (and refactor)import csv
from django.core.management.base import BaseCommand, CommandError
from wafer.tickets.views import import_ticket
class Command(BaseCommand):
args = '<csv file>'
help = "Import a guest list CSV from Quicket"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError('1 CSV File required')
columns = ('Ticket Number', 'Ticket Barcode', 'Purchase Date',
'Ticket Type', 'Ticket Holder', 'Email', 'Cellphone',
'Checked in', 'Checked in date', 'Checked in by',
'Complimentary')
keys = [column.lower().replace(' ', '_') for column in columns]
with open(args[0], 'r') as f:
reader = csv.reader(f)
header = tuple(next(reader))
if header != columns:
raise CommandError('CSV format has changed. Update wafer')
for row in reader:
ticket = dict(zip(keys, row))
import_ticket(ticket['ticket_barcode'],
ticket['ticket_type'],
ticket['email'])
|
<commit_before>import csv
from django.core.management.base import BaseCommand, CommandError
from wafer.tickets.views import import_ticket
class Command(BaseCommand):
args = '<csv file>'
help = "Import a guest list CSV from Quicket"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError('1 CSV File required')
with open(args[0], 'r') as f:
reader = csv.reader(f)
header = next(reader)
if len(header) != 11:
raise CommandError('CSV format has changed. Update wafer')
for ticket in reader:
self.import_ticket(*ticket)
def import_ticket(self, ticket_number, ticket_barcode, purchase_date,
ticket_type, ticket_holder, email, cellphone, checked_in,
checked_in_date, checked_in_by, complimentary):
import_ticket(ticket_barcode, ticket_type, email)
<commit_msg>Check CSV header, not column count (and refactor)<commit_after>import csv
from django.core.management.base import BaseCommand, CommandError
from wafer.tickets.views import import_ticket
class Command(BaseCommand):
args = '<csv file>'
help = "Import a guest list CSV from Quicket"
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError('1 CSV File required')
columns = ('Ticket Number', 'Ticket Barcode', 'Purchase Date',
'Ticket Type', 'Ticket Holder', 'Email', 'Cellphone',
'Checked in', 'Checked in date', 'Checked in by',
'Complimentary')
keys = [column.lower().replace(' ', '_') for column in columns]
with open(args[0], 'r') as f:
reader = csv.reader(f)
header = tuple(next(reader))
if header != columns:
raise CommandError('CSV format has changed. Update wafer')
for row in reader:
ticket = dict(zip(keys, row))
import_ticket(ticket['ticket_barcode'],
ticket['ticket_type'],
ticket['email'])
|
4a8dbde660361a53a3206097bff9ae95b0edfec7
|
alg_strongly_connected_graph.py
|
alg_strongly_connected_graph.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur():
    """Recursive DFS visit of one vertex (stub; not yet implemented)."""
    pass
def traverse_dfs_recur():
    """DFS traversal driver over the whole graph (stub; not yet implemented)."""
    pass
def transpose_graph():
    """Reverse every edge of the graph (stub; not yet implemented)."""
    pass
def strongly_connected_graph():
    """Find strongly connected graphs by Kosaraju's Algorithm."""
def main():
    # Example digraph with 3 strongly connected components:
    # {A, B, D, E, G}, {C}, {F, H, I}.
    # NOTE(review): adjacency_dict is built but never used; presumably a
    # strongly_connected_graph(adjacency_dict) call is still TODO.
    adjacency_dict = {
        'A': {'B'},
        'B': {'C', 'E'},
        'C': {'C', 'F'},
        'D': {'B', 'G'},
        'E': {'A', 'D'},
        'F': {'H'},
        'G': {'E'},
        'H': {'I'},
        'I': {'F'}
    }
if __name__ == '__main__':
    main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur():
pass
def traverse_dfs_recur():
pass
def transpose_graph():
pass
def strongly_connected_graph():
"""Find strongly connected graphs by Kosaraju's Algorithm."""
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adjacency_dict = {
'A': {'B'},
'B': {'C', 'E'},
'C': {'C', 'F'},
'D': {'B', 'G'},
'E': {'A', 'D'},
'F': {'H'},
'G': {'E'},
'H': {'I'},
'I': {'F'}
}
if __name__ == '__main__':
main()
|
Add comment for strongly connected graphs
|
Add comment for strongly connected graphs
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur():
pass
def traverse_dfs_recur():
pass
def transpose_graph():
pass
def strongly_connected_graph():
"""Find strongly connected graphs by Kosaraju's Algorithm."""
def main():
adjacency_dict = {
'A': {'B'},
'B': {'C', 'E'},
'C': {'C', 'F'},
'D': {'B', 'G'},
'E': {'A', 'D'},
'F': {'H'},
'G': {'E'},
'H': {'I'},
'I': {'F'}
}
if __name__ == '__main__':
main()
Add comment for strongly connected graphs
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur():
pass
def traverse_dfs_recur():
pass
def transpose_graph():
pass
def strongly_connected_graph():
"""Find strongly connected graphs by Kosaraju's Algorithm."""
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adjacency_dict = {
'A': {'B'},
'B': {'C', 'E'},
'C': {'C', 'F'},
'D': {'B', 'G'},
'E': {'A', 'D'},
'F': {'H'},
'G': {'E'},
'H': {'I'},
'I': {'F'}
}
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur():
pass
def traverse_dfs_recur():
pass
def transpose_graph():
pass
def strongly_connected_graph():
"""Find strongly connected graphs by Kosaraju's Algorithm."""
def main():
adjacency_dict = {
'A': {'B'},
'B': {'C', 'E'},
'C': {'C', 'F'},
'D': {'B', 'G'},
'E': {'A', 'D'},
'F': {'H'},
'G': {'E'},
'H': {'I'},
'I': {'F'}
}
if __name__ == '__main__':
main()
<commit_msg>Add comment for strongly connected graphs<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur():
pass
def traverse_dfs_recur():
pass
def transpose_graph():
pass
def strongly_connected_graph():
"""Find strongly connected graphs by Kosaraju's Algorithm."""
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adjacency_dict = {
'A': {'B'},
'B': {'C', 'E'},
'C': {'C', 'F'},
'D': {'B', 'G'},
'E': {'A', 'D'},
'F': {'H'},
'G': {'E'},
'H': {'I'},
'I': {'F'}
}
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur():
pass
def traverse_dfs_recur():
pass
def transpose_graph():
pass
def strongly_connected_graph():
"""Find strongly connected graphs by Kosaraju's Algorithm."""
def main():
adjacency_dict = {
'A': {'B'},
'B': {'C', 'E'},
'C': {'C', 'F'},
'D': {'B', 'G'},
'E': {'A', 'D'},
'F': {'H'},
'G': {'E'},
'H': {'I'},
'I': {'F'}
}
if __name__ == '__main__':
main()
Add comment for strongly connected graphsfrom __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur():
pass
def traverse_dfs_recur():
pass
def transpose_graph():
pass
def strongly_connected_graph():
"""Find strongly connected graphs by Kosaraju's Algorithm."""
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adjacency_dict = {
'A': {'B'},
'B': {'C', 'E'},
'C': {'C', 'F'},
'D': {'B', 'G'},
'E': {'A', 'D'},
'F': {'H'},
'G': {'E'},
'H': {'I'},
'I': {'F'}
}
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur():
pass
def traverse_dfs_recur():
pass
def transpose_graph():
pass
def strongly_connected_graph():
"""Find strongly connected graphs by Kosaraju's Algorithm."""
def main():
adjacency_dict = {
'A': {'B'},
'B': {'C', 'E'},
'C': {'C', 'F'},
'D': {'B', 'G'},
'E': {'A', 'D'},
'F': {'H'},
'G': {'E'},
'H': {'I'},
'I': {'F'}
}
if __name__ == '__main__':
main()
<commit_msg>Add comment for strongly connected graphs<commit_after>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def dfs_recur():
pass
def traverse_dfs_recur():
pass
def transpose_graph():
pass
def strongly_connected_graph():
"""Find strongly connected graphs by Kosaraju's Algorithm."""
def main():
# 3 strongly connected graphs: {A, B, D, E, G}, {C}, {F, H, I}.
adjacency_dict = {
'A': {'B'},
'B': {'C', 'E'},
'C': {'C', 'F'},
'D': {'B', 'G'},
'E': {'A', 'D'},
'F': {'H'},
'G': {'E'},
'H': {'I'},
'I': {'F'}
}
if __name__ == '__main__':
main()
|
14d0669f54b1207d8764a97bd4a73e1d4c45f679
|
sep/sep_search_result.py
|
sep/sep_search_result.py
|
from lxml import html
import re
import requests
from constants import SEP_URL
class SEPSearchResult():
    """Query the SEP search engine and scrape result titles and URLs."""

    query = None    # list of lower-cased query words
    results = None  # cached output of the last request_results() call

    def __init__(self, query):
        self.set_query(query)

    def set_query(self, query):
        """Store *query* as a list of lower-cased whitespace-split words."""
        self.query = str(query).lower().split()

    @property
    def url(self):
        """Search URL for the current query (words joined by '+')."""
        url = SEP_URL + "search/searcher.py?query="
        for word in self.query:
            url += word + "+"
        return url

    def request_results(self):
        """Fetch and parse the search page; return [{'title': ..., 'url': ...}]."""
        page = requests.get(self.url)
        # Remove bold tags and newlines so titles parse as single text nodes.
        text_no_bold = re.sub('</? ?b>', '', page.text)
        text_no_newlines = re.sub('\n', '', text_no_bold)
        tree = html.fromstring(text_no_newlines)
        titles = tree.xpath("//div[@class='result_title']/a/text()")
        urls = tree.xpath("//div[@class='result_title']/a/@href")
        # Build the output tuples
        output = []
        for i in range(len(titles)):
            # BUG FIX: str.lstrip("../") strips ANY leading '.' or '/'
            # characters (it takes a character set, not a prefix), so it
            # would also eat a leading dot of the real path. Remove only
            # literal leading '../' segments instead.
            relative = re.sub(r'^(\.\./)+', '', urls[i])
            output.append(
                {
                    "title": titles[i],
                    "url": SEP_URL + relative
                }
            )
        self.results = output
        return output
|
from lxml import html
import re
import requests
from constants import SEP_URL
class SEPSearchResult():
    """Query the SEP search engine and scrape result titles and URLs."""

    query = None    # list of lower-cased, alphanumeric-only query words
    results = None  # cached output of the last request_results() call

    def __init__(self, query):
        self.set_query(query)

    def set_query(self, query):
        """Normalize *query*: drop non-alphanumerics, lower-case, split on whitespace."""
        pattern = re.compile('[^a-zA-Z\d\s]')
        cleaned = re.sub(pattern, '', query)
        self.query = str(cleaned).lower().split()

    @property
    def url(self):
        """Search URL for the current query (each word followed by '+')."""
        suffix = "".join(word + "+" for word in self.query)
        return SEP_URL + "search/searcher.py?query=" + suffix

    def request_results(self):
        """Fetch and parse the search page; return [{'title': ..., 'url': ...}]."""
        page = requests.get(self.url)
        # Strip bold tags, then newlines, before handing the text to lxml.
        cleaned = re.sub('\n', '', re.sub('</? ?b>', '', page.text))
        tree = html.fromstring(cleaned)
        titles = tree.xpath("//div[@class='result_title']/a/text()")
        urls = tree.xpath("//div[@class='result_title']/a/@href")
        self.results = [
            {
                "title": titles[i],
                "url": SEP_URL + urls[i].lstrip("../")
            }
            for i in range(len(titles))
        ]
        return self.results
|
Use regex to remove non alphanumerics from post titles
|
New: Use regex to remove non alphanumerics from post titles
|
Python
|
mit
|
AFFogarty/SEP-Bot,AFFogarty/SEP-Bot
|
from lxml import html
import re
import requests
from constants import SEP_URL
class SEPSearchResult():
query = None
results = None
def __init__(self, query):
self.set_query(query)
def set_query(self, query):
self.query = str(query).lower().split()
@property
def url(self):
url = SEP_URL + "search/searcher.py?query="
for word in self.query:
url += word + "+"
return url
def request_results(self):
page = requests.get(self.url)
# Remvoe bold tags
text_no_bold = re.sub('</? ?b>', '', page.text)
text_no_newlines = re.sub('\n', '', text_no_bold)
tree = html.fromstring(text_no_newlines)
titles = tree.xpath("//div[@class='result_title']/a/text()")
urls = tree.xpath("//div[@class='result_title']/a/@href")
# Build the output tuples
output = []
for i in range(len(titles)):
output.append(
{
"title": titles[i],
"url": SEP_URL + urls[i].lstrip("../")
}
)
self.results = output
return outputNew: Use regex to remove non alphanumerics from post titles
|
from lxml import html
import re
import requests
from constants import SEP_URL
class SEPSearchResult():
query = None
results = None
def __init__(self, query):
self.set_query(query)
def set_query(self, query):
pattern = re.compile('[^a-zA-Z\d\s]')
stripped_query = re.sub(pattern, '', query)
self.query = str(stripped_query).lower().split()
@property
def url(self):
url = SEP_URL + "search/searcher.py?query="
for word in self.query:
url += word + "+"
return url
def request_results(self):
page = requests.get(self.url)
# Remvoe bold tags
text_no_bold = re.sub('</? ?b>', '', page.text)
text_no_newlines = re.sub('\n', '', text_no_bold)
tree = html.fromstring(text_no_newlines)
titles = tree.xpath("//div[@class='result_title']/a/text()")
urls = tree.xpath("//div[@class='result_title']/a/@href")
# Build the output tuples
output = []
for i in range(len(titles)):
output.append(
{
"title": titles[i],
"url": SEP_URL + urls[i].lstrip("../")
}
)
self.results = output
return output
|
<commit_before>from lxml import html
import re
import requests
from constants import SEP_URL
class SEPSearchResult():
query = None
results = None
def __init__(self, query):
self.set_query(query)
def set_query(self, query):
self.query = str(query).lower().split()
@property
def url(self):
url = SEP_URL + "search/searcher.py?query="
for word in self.query:
url += word + "+"
return url
def request_results(self):
page = requests.get(self.url)
# Remvoe bold tags
text_no_bold = re.sub('</? ?b>', '', page.text)
text_no_newlines = re.sub('\n', '', text_no_bold)
tree = html.fromstring(text_no_newlines)
titles = tree.xpath("//div[@class='result_title']/a/text()")
urls = tree.xpath("//div[@class='result_title']/a/@href")
# Build the output tuples
output = []
for i in range(len(titles)):
output.append(
{
"title": titles[i],
"url": SEP_URL + urls[i].lstrip("../")
}
)
self.results = output
return output<commit_msg>New: Use regex to remove non alphanumerics from post titles<commit_after>
|
from lxml import html
import re
import requests
from constants import SEP_URL
class SEPSearchResult():
query = None
results = None
def __init__(self, query):
self.set_query(query)
def set_query(self, query):
pattern = re.compile('[^a-zA-Z\d\s]')
stripped_query = re.sub(pattern, '', query)
self.query = str(stripped_query).lower().split()
@property
def url(self):
url = SEP_URL + "search/searcher.py?query="
for word in self.query:
url += word + "+"
return url
def request_results(self):
page = requests.get(self.url)
# Remvoe bold tags
text_no_bold = re.sub('</? ?b>', '', page.text)
text_no_newlines = re.sub('\n', '', text_no_bold)
tree = html.fromstring(text_no_newlines)
titles = tree.xpath("//div[@class='result_title']/a/text()")
urls = tree.xpath("//div[@class='result_title']/a/@href")
# Build the output tuples
output = []
for i in range(len(titles)):
output.append(
{
"title": titles[i],
"url": SEP_URL + urls[i].lstrip("../")
}
)
self.results = output
return output
|
from lxml import html
import re
import requests
from constants import SEP_URL
class SEPSearchResult():
query = None
results = None
def __init__(self, query):
self.set_query(query)
def set_query(self, query):
self.query = str(query).lower().split()
@property
def url(self):
url = SEP_URL + "search/searcher.py?query="
for word in self.query:
url += word + "+"
return url
def request_results(self):
page = requests.get(self.url)
# Remvoe bold tags
text_no_bold = re.sub('</? ?b>', '', page.text)
text_no_newlines = re.sub('\n', '', text_no_bold)
tree = html.fromstring(text_no_newlines)
titles = tree.xpath("//div[@class='result_title']/a/text()")
urls = tree.xpath("//div[@class='result_title']/a/@href")
# Build the output tuples
output = []
for i in range(len(titles)):
output.append(
{
"title": titles[i],
"url": SEP_URL + urls[i].lstrip("../")
}
)
self.results = output
return outputNew: Use regex to remove non alphanumerics from post titlesfrom lxml import html
import re
import requests
from constants import SEP_URL
class SEPSearchResult():
query = None
results = None
def __init__(self, query):
self.set_query(query)
def set_query(self, query):
pattern = re.compile('[^a-zA-Z\d\s]')
stripped_query = re.sub(pattern, '', query)
self.query = str(stripped_query).lower().split()
@property
def url(self):
url = SEP_URL + "search/searcher.py?query="
for word in self.query:
url += word + "+"
return url
def request_results(self):
page = requests.get(self.url)
# Remvoe bold tags
text_no_bold = re.sub('</? ?b>', '', page.text)
text_no_newlines = re.sub('\n', '', text_no_bold)
tree = html.fromstring(text_no_newlines)
titles = tree.xpath("//div[@class='result_title']/a/text()")
urls = tree.xpath("//div[@class='result_title']/a/@href")
# Build the output tuples
output = []
for i in range(len(titles)):
output.append(
{
"title": titles[i],
"url": SEP_URL + urls[i].lstrip("../")
}
)
self.results = output
return output
|
<commit_before>from lxml import html
import re
import requests
from constants import SEP_URL
class SEPSearchResult():
query = None
results = None
def __init__(self, query):
self.set_query(query)
def set_query(self, query):
self.query = str(query).lower().split()
@property
def url(self):
url = SEP_URL + "search/searcher.py?query="
for word in self.query:
url += word + "+"
return url
def request_results(self):
page = requests.get(self.url)
# Remvoe bold tags
text_no_bold = re.sub('</? ?b>', '', page.text)
text_no_newlines = re.sub('\n', '', text_no_bold)
tree = html.fromstring(text_no_newlines)
titles = tree.xpath("//div[@class='result_title']/a/text()")
urls = tree.xpath("//div[@class='result_title']/a/@href")
# Build the output tuples
output = []
for i in range(len(titles)):
output.append(
{
"title": titles[i],
"url": SEP_URL + urls[i].lstrip("../")
}
)
self.results = output
return output<commit_msg>New: Use regex to remove non alphanumerics from post titles<commit_after>from lxml import html
import re
import requests
from constants import SEP_URL
class SEPSearchResult():
query = None
results = None
def __init__(self, query):
self.set_query(query)
def set_query(self, query):
pattern = re.compile('[^a-zA-Z\d\s]')
stripped_query = re.sub(pattern, '', query)
self.query = str(stripped_query).lower().split()
@property
def url(self):
url = SEP_URL + "search/searcher.py?query="
for word in self.query:
url += word + "+"
return url
def request_results(self):
page = requests.get(self.url)
# Remvoe bold tags
text_no_bold = re.sub('</? ?b>', '', page.text)
text_no_newlines = re.sub('\n', '', text_no_bold)
tree = html.fromstring(text_no_newlines)
titles = tree.xpath("//div[@class='result_title']/a/text()")
urls = tree.xpath("//div[@class='result_title']/a/@href")
# Build the output tuples
output = []
for i in range(len(titles)):
output.append(
{
"title": titles[i],
"url": SEP_URL + urls[i].lstrip("../")
}
)
self.results = output
return output
|
9c91cdeaed24ab994924b9a5485b6cc3feb9dfc0
|
tutorials/models.py
|
tutorials/models.py
|
from django.db import models
# Create your models here.
class Tutorial(models.Model):
    """A tutorial entry with a title plus HTML and markdown bodies."""
    # NOTE(review): html and markdown are stored side by side; presumably
    # html is rendered from the markdown source -- confirm with callers.
    title = models.TextField()
    html = models.TextField()
    markdown = models.TextField()
|
from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
    """A tutorial entry; markdown is edited through a Markdownx field."""
    # ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
    # Category = models.TextField()
    title = models.TextField()
    html = models.TextField()
    markdown = MarkdownxField()
    # Level = models.IntegerField()
|
Add missing Fields according to mockup, Add markdownfield
|
Add missing Fields according to mockup, Add markdownfield
|
Python
|
agpl-3.0
|
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
|
from django.db import models
# Create your models here.
class Tutorial(models.Model):
title = models.TextField()
html = models.TextField()
markdown = models.TextField()Add missing Fields according to mockup, Add markdownfield
|
from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
|
<commit_before>from django.db import models
# Create your models here.
class Tutorial(models.Model):
title = models.TextField()
html = models.TextField()
markdown = models.TextField()<commit_msg>Add missing Fields according to mockup, Add markdownfield<commit_after>
|
from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
|
from django.db import models
# Create your models here.
class Tutorial(models.Model):
title = models.TextField()
html = models.TextField()
markdown = models.TextField()Add missing Fields according to mockup, Add markdownfieldfrom django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
|
<commit_before>from django.db import models
# Create your models here.
class Tutorial(models.Model):
title = models.TextField()
html = models.TextField()
markdown = models.TextField()<commit_msg>Add missing Fields according to mockup, Add markdownfield<commit_after>from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
|
44e9e06c7db64682340505754af0b69b99cae305
|
diceware.py
|
diceware.py
|
#!/usr/bin/env python
import random
def read_list(filename):
    """Parse a diceware word-list file.

    Collects the words between the first blank line and the next blank
    line; each line in that section is "roll<TAB>word".
    """
    words = []
    with open(filename, "r") as handle:
        in_body = False
        for line in handle:
            if not in_body:
                # Skip the preamble until the first blank separator line.
                if line == "\n":
                    in_body = True
                continue
            if line == "\n":
                # Second blank line ends the word section.
                break
            words.append(line.split("\t")[1].strip())
    return words
def generate_passphrase(wordlist, length=5):
    """Pick *length* words uniformly at random from *wordlist*.

    Uses random.SystemRandom (os.urandom-backed) rather than the default
    module-level PRNG: passphrases are security-sensitive and the default
    Mersenne Twister is not cryptographically strong.
    """
    rng = random.SystemRandom()
    return [rng.choice(wordlist) for _ in range(length)]
if __name__ == "__main__":
wordlist = read_list("diceware.wordlist.asc")
words = generate_passphrase(wordlist)
print("-".join(words))
|
#!/usr/bin/env python
import random
def read_list(filename):
words = []
with open(filename, "r") as wordfile:
started = False
for line in wordfile:
if not started and line == "\n":
started = True
elif started:
if line == "\n":
break
else:
words.append(line.split("\t")[1].strip())
return words
def generate_passphrase(wordlist, length=5):
return [random.SystemRandom().choice(wordlist) for x in range(length)]
if __name__ == "__main__":
wordlist = read_list("diceware.wordlist.asc")
words = generate_passphrase(wordlist)
print("-".join(words))
|
Use SystemRandom to access urandom where available
|
Use SystemRandom to access urandom where available
|
Python
|
mit
|
davb5/pydiceware
|
#!/usr/bin/env python
import random
def read_list(filename):
words = []
with open(filename, "r") as wordfile:
started = False
for line in wordfile:
if not started and line == "\n":
started = True
elif started:
if line == "\n":
break
else:
words.append(line.split("\t")[1].strip())
return words
def generate_passphrase(wordlist, length=5):
return [random.choice(wordlist) for x in range(length)]
if __name__ == "__main__":
wordlist = read_list("diceware.wordlist.asc")
words = generate_passphrase(wordlist)
print("-".join(words))
Use SystemRandom to access urandom where available
|
#!/usr/bin/env python
import random
def read_list(filename):
words = []
with open(filename, "r") as wordfile:
started = False
for line in wordfile:
if not started and line == "\n":
started = True
elif started:
if line == "\n":
break
else:
words.append(line.split("\t")[1].strip())
return words
def generate_passphrase(wordlist, length=5):
return [random.SystemRandom().choice(wordlist) for x in range(length)]
if __name__ == "__main__":
wordlist = read_list("diceware.wordlist.asc")
words = generate_passphrase(wordlist)
print("-".join(words))
|
<commit_before>#!/usr/bin/env python
import random
def read_list(filename):
words = []
with open(filename, "r") as wordfile:
started = False
for line in wordfile:
if not started and line == "\n":
started = True
elif started:
if line == "\n":
break
else:
words.append(line.split("\t")[1].strip())
return words
def generate_passphrase(wordlist, length=5):
return [random.choice(wordlist) for x in range(length)]
if __name__ == "__main__":
wordlist = read_list("diceware.wordlist.asc")
words = generate_passphrase(wordlist)
print("-".join(words))
<commit_msg>Use SystemRandom to access urandom where available<commit_after>
|
#!/usr/bin/env python
import random
def read_list(filename):
words = []
with open(filename, "r") as wordfile:
started = False
for line in wordfile:
if not started and line == "\n":
started = True
elif started:
if line == "\n":
break
else:
words.append(line.split("\t")[1].strip())
return words
def generate_passphrase(wordlist, length=5):
return [random.SystemRandom().choice(wordlist) for x in range(length)]
if __name__ == "__main__":
wordlist = read_list("diceware.wordlist.asc")
words = generate_passphrase(wordlist)
print("-".join(words))
|
#!/usr/bin/env python
import random
def read_list(filename):
words = []
with open(filename, "r") as wordfile:
started = False
for line in wordfile:
if not started and line == "\n":
started = True
elif started:
if line == "\n":
break
else:
words.append(line.split("\t")[1].strip())
return words
def generate_passphrase(wordlist, length=5):
return [random.choice(wordlist) for x in range(length)]
if __name__ == "__main__":
wordlist = read_list("diceware.wordlist.asc")
words = generate_passphrase(wordlist)
print("-".join(words))
Use SystemRandom to access urandom where available#!/usr/bin/env python
import random
def read_list(filename):
words = []
with open(filename, "r") as wordfile:
started = False
for line in wordfile:
if not started and line == "\n":
started = True
elif started:
if line == "\n":
break
else:
words.append(line.split("\t")[1].strip())
return words
def generate_passphrase(wordlist, length=5):
return [random.SystemRandom().choice(wordlist) for x in range(length)]
if __name__ == "__main__":
wordlist = read_list("diceware.wordlist.asc")
words = generate_passphrase(wordlist)
print("-".join(words))
|
<commit_before>#!/usr/bin/env python
import random
def read_list(filename):
words = []
with open(filename, "r") as wordfile:
started = False
for line in wordfile:
if not started and line == "\n":
started = True
elif started:
if line == "\n":
break
else:
words.append(line.split("\t")[1].strip())
return words
def generate_passphrase(wordlist, length=5):
return [random.choice(wordlist) for x in range(length)]
if __name__ == "__main__":
wordlist = read_list("diceware.wordlist.asc")
words = generate_passphrase(wordlist)
print("-".join(words))
<commit_msg>Use SystemRandom to access urandom where available<commit_after>#!/usr/bin/env python
import random
def read_list(filename):
words = []
with open(filename, "r") as wordfile:
started = False
for line in wordfile:
if not started and line == "\n":
started = True
elif started:
if line == "\n":
break
else:
words.append(line.split("\t")[1].strip())
return words
def generate_passphrase(wordlist, length=5):
return [random.SystemRandom().choice(wordlist) for x in range(length)]
if __name__ == "__main__":
wordlist = read_list("diceware.wordlist.asc")
words = generate_passphrase(wordlist)
print("-".join(words))
|
5081734e07497e26834485891d634a7f3ac7ef28
|
pies/collections.py
|
pies/collections.py
|
from __future__ import absolute_import
from collections import *
from .version_info import PY2
if PY2:
from UserString import *
from UserList import *
from ordereddict import OrderedDict
|
from __future__ import absolute_import
from collections import *
from .version_info import PY2
if PY2:
from UserString import *
from UserList import *
import sys
if sys.version_info < (2, 7):
from ordereddict import OrderedDict
|
Fix import of ordered dict on python 2.7
|
Fix import of ordered dict on python 2.7
|
Python
|
mit
|
AbsoluteMSTR/pies,timothycrosley/pies,lisongmin/pies,AbsoluteMSTR/pies,lisongmin/pies,timothycrosley/pies
|
from __future__ import absolute_import
from collections import *
from .version_info import PY2
if PY2:
from UserString import *
from UserList import *
from ordereddict import OrderedDict
Fix import of ordered dict on python 2.7
|
from __future__ import absolute_import
from collections import *
from .version_info import PY2
if PY2:
from UserString import *
from UserList import *
import sys
if sys.version_info < (2, 7):
from ordereddict import OrderedDict
|
<commit_before>from __future__ import absolute_import
from collections import *
from .version_info import PY2
if PY2:
from UserString import *
from UserList import *
from ordereddict import OrderedDict
<commit_msg>Fix import of ordered dict on python 2.7<commit_after>
|
from __future__ import absolute_import
from collections import *
from .version_info import PY2
if PY2:
from UserString import *
from UserList import *
import sys
if sys.version_info < (2, 7):
from ordereddict import OrderedDict
|
from __future__ import absolute_import
from collections import *
from .version_info import PY2
if PY2:
from UserString import *
from UserList import *
from ordereddict import OrderedDict
Fix import of ordered dict on python 2.7from __future__ import absolute_import
from collections import *
from .version_info import PY2
if PY2:
from UserString import *
from UserList import *
import sys
if sys.version_info < (2, 7):
from ordereddict import OrderedDict
|
<commit_before>from __future__ import absolute_import
from collections import *
from .version_info import PY2
if PY2:
from UserString import *
from UserList import *
from ordereddict import OrderedDict
<commit_msg>Fix import of ordered dict on python 2.7<commit_after>from __future__ import absolute_import
from collections import *
from .version_info import PY2
if PY2:
from UserString import *
from UserList import *
import sys
if sys.version_info < (2, 7):
from ordereddict import OrderedDict
|
fca148d85b0deb16c988473ddab651529653e9de
|
cheroot/__init__.py
|
cheroot/__init__.py
|
"""High-performance, pure-Python HTTP server used by CherryPy."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
try:
import pkg_resources
except ImportError:
pass
try:
__version__ = pkg_resources.get_distribution('cheroot').version
except Exception:
__version__ = 'unknown'
|
"""High-performance, pure-Python HTTP server used by CherryPy."""
try:
import pkg_resources
except ImportError:
pass
try:
__version__ = pkg_resources.get_distribution('cheroot').version
except Exception:
__version__ = 'unknown'
|
Remove compatibility code from cheroot
|
Remove compatibility code from cheroot
|
Python
|
bsd-3-clause
|
cherrypy/cheroot
|
"""High-performance, pure-Python HTTP server used by CherryPy."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
try:
import pkg_resources
except ImportError:
pass
try:
__version__ = pkg_resources.get_distribution('cheroot').version
except Exception:
__version__ = 'unknown'
Remove compatibility code from cheroot
|
"""High-performance, pure-Python HTTP server used by CherryPy."""
try:
import pkg_resources
except ImportError:
pass
try:
__version__ = pkg_resources.get_distribution('cheroot').version
except Exception:
__version__ = 'unknown'
|
<commit_before>"""High-performance, pure-Python HTTP server used by CherryPy."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
try:
import pkg_resources
except ImportError:
pass
try:
__version__ = pkg_resources.get_distribution('cheroot').version
except Exception:
__version__ = 'unknown'
<commit_msg>Remove compatibility code from cheroot<commit_after>
|
"""High-performance, pure-Python HTTP server used by CherryPy."""
try:
import pkg_resources
except ImportError:
pass
try:
__version__ = pkg_resources.get_distribution('cheroot').version
except Exception:
__version__ = 'unknown'
|
"""High-performance, pure-Python HTTP server used by CherryPy."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
try:
import pkg_resources
except ImportError:
pass
try:
__version__ = pkg_resources.get_distribution('cheroot').version
except Exception:
__version__ = 'unknown'
Remove compatibility code from cheroot"""High-performance, pure-Python HTTP server used by CherryPy."""
try:
import pkg_resources
except ImportError:
pass
try:
__version__ = pkg_resources.get_distribution('cheroot').version
except Exception:
__version__ = 'unknown'
|
<commit_before>"""High-performance, pure-Python HTTP server used by CherryPy."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
try:
import pkg_resources
except ImportError:
pass
try:
__version__ = pkg_resources.get_distribution('cheroot').version
except Exception:
__version__ = 'unknown'
<commit_msg>Remove compatibility code from cheroot<commit_after>"""High-performance, pure-Python HTTP server used by CherryPy."""
try:
import pkg_resources
except ImportError:
pass
try:
__version__ = pkg_resources.get_distribution('cheroot').version
except Exception:
__version__ = 'unknown'
|
1a93c58e278712a2c52f36b098a570a7f48c7ef2
|
taOonja/game/views.py
|
taOonja/game/views.py
|
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from game.models import *
class LocationListView(ListView):
template_name = 'game/location_list.html'
context_object_name = 'location_list'
def get_queryset(self):
return Detail.objects.all()
def get_context_data(self, **kwargs):
context = super(LocationListView, self).get_context_data(**kwargs)
return context
class LocationDetailView(DetailView):
model = Location
context_object_name = 'location_detail'
def get_context_data(self, **kwargs):
context = super(LocationDetailView, self).get_context_data(**kwargs)
context['detail_info'] = Detail.objects.all()
return context
|
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from game.models import Location
class LocationListView(ListView):
template_name = 'game/location_list.html'
context_object_name = 'location_list'
def get_queryset(self):
return Location.objects.all()
def get_context_data(self, **kwargs):
context = super(LocationListView, self).get_context_data(**kwargs)
return context
class LocationDetailView(DetailView):
model = Location
context_object_name = 'location_detail'
def get_context_data(self, **kwargs):
l_pk = self.kwargs['pk']
Location.objects.filter(pk=l_pk).update(visited = True)
context = super(LocationDetailView, self).get_context_data(**kwargs)
return context
|
Change View According to model Changes
|
Change View According to model Changes
|
Python
|
mit
|
Javid-Izadfar/TaOonja,Javid-Izadfar/TaOonja,Javid-Izadfar/TaOonja
|
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from game.models import *
class LocationListView(ListView):
template_name = 'game/location_list.html'
context_object_name = 'location_list'
def get_queryset(self):
return Detail.objects.all()
def get_context_data(self, **kwargs):
context = super(LocationListView, self).get_context_data(**kwargs)
return context
class LocationDetailView(DetailView):
model = Location
context_object_name = 'location_detail'
def get_context_data(self, **kwargs):
context = super(LocationDetailView, self).get_context_data(**kwargs)
context['detail_info'] = Detail.objects.all()
return context
Change View According to model Changes
|
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from game.models import Location
class LocationListView(ListView):
template_name = 'game/location_list.html'
context_object_name = 'location_list'
def get_queryset(self):
return Location.objects.all()
def get_context_data(self, **kwargs):
context = super(LocationListView, self).get_context_data(**kwargs)
return context
class LocationDetailView(DetailView):
model = Location
context_object_name = 'location_detail'
def get_context_data(self, **kwargs):
l_pk = self.kwargs['pk']
Location.objects.filter(pk=l_pk).update(visited = True)
context = super(LocationDetailView, self).get_context_data(**kwargs)
return context
|
<commit_before>from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from game.models import *
class LocationListView(ListView):
template_name = 'game/location_list.html'
context_object_name = 'location_list'
def get_queryset(self):
return Detail.objects.all()
def get_context_data(self, **kwargs):
context = super(LocationListView, self).get_context_data(**kwargs)
return context
class LocationDetailView(DetailView):
model = Location
context_object_name = 'location_detail'
def get_context_data(self, **kwargs):
context = super(LocationDetailView, self).get_context_data(**kwargs)
context['detail_info'] = Detail.objects.all()
return context
<commit_msg>Change View According to model Changes<commit_after>
|
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from game.models import Location
class LocationListView(ListView):
template_name = 'game/location_list.html'
context_object_name = 'location_list'
def get_queryset(self):
return Location.objects.all()
def get_context_data(self, **kwargs):
context = super(LocationListView, self).get_context_data(**kwargs)
return context
class LocationDetailView(DetailView):
model = Location
context_object_name = 'location_detail'
def get_context_data(self, **kwargs):
l_pk = self.kwargs['pk']
Location.objects.filter(pk=l_pk).update(visited = True)
context = super(LocationDetailView, self).get_context_data(**kwargs)
return context
|
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from game.models import *
class LocationListView(ListView):
template_name = 'game/location_list.html'
context_object_name = 'location_list'
def get_queryset(self):
return Detail.objects.all()
def get_context_data(self, **kwargs):
context = super(LocationListView, self).get_context_data(**kwargs)
return context
class LocationDetailView(DetailView):
model = Location
context_object_name = 'location_detail'
def get_context_data(self, **kwargs):
context = super(LocationDetailView, self).get_context_data(**kwargs)
context['detail_info'] = Detail.objects.all()
return context
Change View According to model Changesfrom django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from game.models import Location
class LocationListView(ListView):
template_name = 'game/location_list.html'
context_object_name = 'location_list'
def get_queryset(self):
return Location.objects.all()
def get_context_data(self, **kwargs):
context = super(LocationListView, self).get_context_data(**kwargs)
return context
class LocationDetailView(DetailView):
model = Location
context_object_name = 'location_detail'
def get_context_data(self, **kwargs):
l_pk = self.kwargs['pk']
Location.objects.filter(pk=l_pk).update(visited = True)
context = super(LocationDetailView, self).get_context_data(**kwargs)
return context
|
<commit_before>from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from game.models import *
class LocationListView(ListView):
template_name = 'game/location_list.html'
context_object_name = 'location_list'
def get_queryset(self):
return Detail.objects.all()
def get_context_data(self, **kwargs):
context = super(LocationListView, self).get_context_data(**kwargs)
return context
class LocationDetailView(DetailView):
model = Location
context_object_name = 'location_detail'
def get_context_data(self, **kwargs):
context = super(LocationDetailView, self).get_context_data(**kwargs)
context['detail_info'] = Detail.objects.all()
return context
<commit_msg>Change View According to model Changes<commit_after>from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from game.models import Location
class LocationListView(ListView):
template_name = 'game/location_list.html'
context_object_name = 'location_list'
def get_queryset(self):
return Location.objects.all()
def get_context_data(self, **kwargs):
context = super(LocationListView, self).get_context_data(**kwargs)
return context
class LocationDetailView(DetailView):
model = Location
context_object_name = 'location_detail'
def get_context_data(self, **kwargs):
l_pk = self.kwargs['pk']
Location.objects.filter(pk=l_pk).update(visited = True)
context = super(LocationDetailView, self).get_context_data(**kwargs)
return context
|
c57290f07071a42a0744667974922fda897e6354
|
slaveapi/__init__.py
|
slaveapi/__init__.py
|
__version_info__ = ("1", "0", "17")
__version__ = ".".join(__version_info__)
|
__version_info__ = ("1", "0", "18")
__version__ = ".".join(__version_info__)
|
Bump ver for bustage fix
|
Bump ver for bustage fix
|
Python
|
mpl-2.0
|
lundjordan/slaveapi
|
__version_info__ = ("1", "0", "17")
__version__ = ".".join(__version_info__)
Bump ver for bustage fix
|
__version_info__ = ("1", "0", "18")
__version__ = ".".join(__version_info__)
|
<commit_before>__version_info__ = ("1", "0", "17")
__version__ = ".".join(__version_info__)
<commit_msg>Bump ver for bustage fix<commit_after>
|
__version_info__ = ("1", "0", "18")
__version__ = ".".join(__version_info__)
|
__version_info__ = ("1", "0", "17")
__version__ = ".".join(__version_info__)
Bump ver for bustage fix__version_info__ = ("1", "0", "18")
__version__ = ".".join(__version_info__)
|
<commit_before>__version_info__ = ("1", "0", "17")
__version__ = ".".join(__version_info__)
<commit_msg>Bump ver for bustage fix<commit_after>__version_info__ = ("1", "0", "18")
__version__ = ".".join(__version_info__)
|
99880b935c939ab7128a788cc09cd759f3d397b2
|
src/passgen.py
|
src/passgen.py
|
import string
import random
def passgen(length=8):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
for _ in range(10):
print passgen()
|
import string
import random
import argparse
def passgen(length=8):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
parser = argparse.ArgumentParser("Generate strong random password.")
parser.add_argument("length",
help="the number of characters to generate ",
type=int)
args = parser.parse_args()
for _ in range(10):
print passgen(args.length)
|
Add length argument to the main script
|
Add length argument to the main script
|
Python
|
mit
|
soslan/passgen
|
import string
import random
def passgen(length=8):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
for _ in range(10):
print passgen()
Add length argument to the main script
|
import string
import random
import argparse
def passgen(length=8):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
parser = argparse.ArgumentParser("Generate strong random password.")
parser.add_argument("length",
help="the number of characters to generate ",
type=int)
args = parser.parse_args()
for _ in range(10):
print passgen(args.length)
|
<commit_before>import string
import random
def passgen(length=8):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
for _ in range(10):
print passgen()
<commit_msg>Add length argument to the main script<commit_after>
|
import string
import random
import argparse
def passgen(length=8):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
parser = argparse.ArgumentParser("Generate strong random password.")
parser.add_argument("length",
help="the number of characters to generate ",
type=int)
args = parser.parse_args()
for _ in range(10):
print passgen(args.length)
|
import string
import random
def passgen(length=8):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
for _ in range(10):
print passgen()
Add length argument to the main scriptimport string
import random
import argparse
def passgen(length=8):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
parser = argparse.ArgumentParser("Generate strong random password.")
parser.add_argument("length",
help="the number of characters to generate ",
type=int)
args = parser.parse_args()
for _ in range(10):
print passgen(args.length)
|
<commit_before>import string
import random
def passgen(length=8):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
for _ in range(10):
print passgen()
<commit_msg>Add length argument to the main script<commit_after>import string
import random
import argparse
def passgen(length=8):
"""Generate a strong password with *length* characters"""
pool = string.ascii_uppercase + string.ascii_lowercase + string.digits
return ''.join(random.SystemRandom().choice(pool) for _ in range(length))
def main():
parser = argparse.ArgumentParser("Generate strong random password.")
parser.add_argument("length",
help="the number of characters to generate ",
type=int)
args = parser.parse_args()
for _ in range(10):
print passgen(args.length)
|
0053cba05e19f640b5d30d02a130f6c994f68f8e
|
speedcenter/codespeed/admin.py
|
speedcenter/codespeed/admin.py
|
# -*- coding: utf-8 -*-
from codespeed.models import Project, Revision, Executable, Benchmark, Result, Environment
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name', 'repo_type', 'repo_path', 'track')
admin.site.register(Project, ProjectAdmin)
class RevisionAdmin(admin.ModelAdmin):
list_display = ('commitid', 'project', 'tag', 'date')
list_filter = ('project', 'tag', 'date')
search_fields = ['commitid']
admin.site.register(Revision, RevisionAdmin)
class ExecutableAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'id')
admin.site.register(Executable, ExecutableAdmin)
class BenchmarkAdmin(admin.ModelAdmin):
list_display = ('name', 'benchmark_type', 'description', 'units_title', 'units', 'lessisbetter')
admin.site.register(Benchmark, BenchmarkAdmin)
class EnvironmentAdmin(admin.ModelAdmin):
list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
admin.site.register(Environment, EnvironmentAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('revision', 'benchmark', 'executable', 'environment', 'value', 'date', 'environment')
list_filter = ('date', 'executable', 'benchmark', 'environment')
admin.site.register(Result, ResultAdmin)
|
# -*- coding: utf-8 -*-
from codespeed.models import Project, Revision, Executable, Benchmark, Result, Environment
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name', 'repo_type', 'repo_path', 'track')
admin.site.register(Project, ProjectAdmin)
class RevisionAdmin(admin.ModelAdmin):
list_display = ('commitid', 'project', 'tag', 'date')
list_filter = ('project', 'tag', 'date')
search_fields = ['commitid']
admin.site.register(Revision, RevisionAdmin)
class ExecutableAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'id')
admin.site.register(Executable, ExecutableAdmin)
class BenchmarkAdmin(admin.ModelAdmin):
list_display = ('name', 'benchmark_type', 'description', 'units_title', 'units', 'lessisbetter')
ordering = ['name']
admin.site.register(Benchmark, BenchmarkAdmin)
class EnvironmentAdmin(admin.ModelAdmin):
list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
admin.site.register(Environment, EnvironmentAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('revision', 'benchmark', 'executable', 'environment', 'value', 'date', 'environment')
list_filter = ('date', 'executable', 'benchmark', 'environment')
admin.site.register(Result, ResultAdmin)
|
Order Benchmark by name in the Admin
|
Order Benchmark by name in the Admin
|
Python
|
lgpl-2.1
|
cykl/codespeed,alex/codespeed,nomeata/codespeed,alex/codespeed,cykl/codespeed,nomeata/codespeed
|
# -*- coding: utf-8 -*-
from codespeed.models import Project, Revision, Executable, Benchmark, Result, Environment
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name', 'repo_type', 'repo_path', 'track')
admin.site.register(Project, ProjectAdmin)
class RevisionAdmin(admin.ModelAdmin):
list_display = ('commitid', 'project', 'tag', 'date')
list_filter = ('project', 'tag', 'date')
search_fields = ['commitid']
admin.site.register(Revision, RevisionAdmin)
class ExecutableAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'id')
admin.site.register(Executable, ExecutableAdmin)
class BenchmarkAdmin(admin.ModelAdmin):
list_display = ('name', 'benchmark_type', 'description', 'units_title', 'units', 'lessisbetter')
admin.site.register(Benchmark, BenchmarkAdmin)
class EnvironmentAdmin(admin.ModelAdmin):
list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
admin.site.register(Environment, EnvironmentAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('revision', 'benchmark', 'executable', 'environment', 'value', 'date', 'environment')
list_filter = ('date', 'executable', 'benchmark', 'environment')
admin.site.register(Result, ResultAdmin)
Order Benchmark by name in the Admin
|
# -*- coding: utf-8 -*-
from codespeed.models import Project, Revision, Executable, Benchmark, Result, Environment
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name', 'repo_type', 'repo_path', 'track')
admin.site.register(Project, ProjectAdmin)
class RevisionAdmin(admin.ModelAdmin):
list_display = ('commitid', 'project', 'tag', 'date')
list_filter = ('project', 'tag', 'date')
search_fields = ['commitid']
admin.site.register(Revision, RevisionAdmin)
class ExecutableAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'id')
admin.site.register(Executable, ExecutableAdmin)
class BenchmarkAdmin(admin.ModelAdmin):
list_display = ('name', 'benchmark_type', 'description', 'units_title', 'units', 'lessisbetter')
ordering = ['name']
admin.site.register(Benchmark, BenchmarkAdmin)
class EnvironmentAdmin(admin.ModelAdmin):
list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
admin.site.register(Environment, EnvironmentAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('revision', 'benchmark', 'executable', 'environment', 'value', 'date', 'environment')
list_filter = ('date', 'executable', 'benchmark', 'environment')
admin.site.register(Result, ResultAdmin)
|
<commit_before># -*- coding: utf-8 -*-
from codespeed.models import Project, Revision, Executable, Benchmark, Result, Environment
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name', 'repo_type', 'repo_path', 'track')
admin.site.register(Project, ProjectAdmin)
class RevisionAdmin(admin.ModelAdmin):
list_display = ('commitid', 'project', 'tag', 'date')
list_filter = ('project', 'tag', 'date')
search_fields = ['commitid']
admin.site.register(Revision, RevisionAdmin)
class ExecutableAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'id')
admin.site.register(Executable, ExecutableAdmin)
class BenchmarkAdmin(admin.ModelAdmin):
list_display = ('name', 'benchmark_type', 'description', 'units_title', 'units', 'lessisbetter')
admin.site.register(Benchmark, BenchmarkAdmin)
class EnvironmentAdmin(admin.ModelAdmin):
list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
admin.site.register(Environment, EnvironmentAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('revision', 'benchmark', 'executable', 'environment', 'value', 'date', 'environment')
list_filter = ('date', 'executable', 'benchmark', 'environment')
admin.site.register(Result, ResultAdmin)
<commit_msg>Order Benchmark by name in the Admin<commit_after>
|
# -*- coding: utf-8 -*-
from codespeed.models import Project, Revision, Executable, Benchmark, Result, Environment
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name', 'repo_type', 'repo_path', 'track')
admin.site.register(Project, ProjectAdmin)
class RevisionAdmin(admin.ModelAdmin):
list_display = ('commitid', 'project', 'tag', 'date')
list_filter = ('project', 'tag', 'date')
search_fields = ['commitid']
admin.site.register(Revision, RevisionAdmin)
class ExecutableAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'id')
admin.site.register(Executable, ExecutableAdmin)
class BenchmarkAdmin(admin.ModelAdmin):
list_display = ('name', 'benchmark_type', 'description', 'units_title', 'units', 'lessisbetter')
ordering = ['name']
admin.site.register(Benchmark, BenchmarkAdmin)
class EnvironmentAdmin(admin.ModelAdmin):
list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
admin.site.register(Environment, EnvironmentAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('revision', 'benchmark', 'executable', 'environment', 'value', 'date', 'environment')
list_filter = ('date', 'executable', 'benchmark', 'environment')
admin.site.register(Result, ResultAdmin)
|
# -*- coding: utf-8 -*-
from codespeed.models import Project, Revision, Executable, Benchmark, Result, Environment
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name', 'repo_type', 'repo_path', 'track')
admin.site.register(Project, ProjectAdmin)
class RevisionAdmin(admin.ModelAdmin):
list_display = ('commitid', 'project', 'tag', 'date')
list_filter = ('project', 'tag', 'date')
search_fields = ['commitid']
admin.site.register(Revision, RevisionAdmin)
class ExecutableAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'id')
admin.site.register(Executable, ExecutableAdmin)
class BenchmarkAdmin(admin.ModelAdmin):
list_display = ('name', 'benchmark_type', 'description', 'units_title', 'units', 'lessisbetter')
admin.site.register(Benchmark, BenchmarkAdmin)
class EnvironmentAdmin(admin.ModelAdmin):
list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
admin.site.register(Environment, EnvironmentAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('revision', 'benchmark', 'executable', 'environment', 'value', 'date', 'environment')
list_filter = ('date', 'executable', 'benchmark', 'environment')
admin.site.register(Result, ResultAdmin)
Order Benchmark by name in the Admin# -*- coding: utf-8 -*-
from codespeed.models import Project, Revision, Executable, Benchmark, Result, Environment
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name', 'repo_type', 'repo_path', 'track')
admin.site.register(Project, ProjectAdmin)
class RevisionAdmin(admin.ModelAdmin):
list_display = ('commitid', 'project', 'tag', 'date')
list_filter = ('project', 'tag', 'date')
search_fields = ['commitid']
admin.site.register(Revision, RevisionAdmin)
class ExecutableAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'id')
admin.site.register(Executable, ExecutableAdmin)
class BenchmarkAdmin(admin.ModelAdmin):
list_display = ('name', 'benchmark_type', 'description', 'units_title', 'units', 'lessisbetter')
ordering = ['name']
admin.site.register(Benchmark, BenchmarkAdmin)
class EnvironmentAdmin(admin.ModelAdmin):
list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
admin.site.register(Environment, EnvironmentAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('revision', 'benchmark', 'executable', 'environment', 'value', 'date', 'environment')
list_filter = ('date', 'executable', 'benchmark', 'environment')
admin.site.register(Result, ResultAdmin)
|
<commit_before># -*- coding: utf-8 -*-
from codespeed.models import Project, Revision, Executable, Benchmark, Result, Environment
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name', 'repo_type', 'repo_path', 'track')
admin.site.register(Project, ProjectAdmin)
class RevisionAdmin(admin.ModelAdmin):
list_display = ('commitid', 'project', 'tag', 'date')
list_filter = ('project', 'tag', 'date')
search_fields = ['commitid']
admin.site.register(Revision, RevisionAdmin)
class ExecutableAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'id')
admin.site.register(Executable, ExecutableAdmin)
class BenchmarkAdmin(admin.ModelAdmin):
list_display = ('name', 'benchmark_type', 'description', 'units_title', 'units', 'lessisbetter')
admin.site.register(Benchmark, BenchmarkAdmin)
class EnvironmentAdmin(admin.ModelAdmin):
list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
admin.site.register(Environment, EnvironmentAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('revision', 'benchmark', 'executable', 'environment', 'value', 'date', 'environment')
list_filter = ('date', 'executable', 'benchmark', 'environment')
admin.site.register(Result, ResultAdmin)
<commit_msg>Order Benchmark by name in the Admin<commit_after># -*- coding: utf-8 -*-
from codespeed.models import Project, Revision, Executable, Benchmark, Result, Environment
from django.contrib import admin
class ProjectAdmin(admin.ModelAdmin):
list_display = ('name', 'repo_type', 'repo_path', 'track')
admin.site.register(Project, ProjectAdmin)
class RevisionAdmin(admin.ModelAdmin):
list_display = ('commitid', 'project', 'tag', 'date')
list_filter = ('project', 'tag', 'date')
search_fields = ['commitid']
admin.site.register(Revision, RevisionAdmin)
class ExecutableAdmin(admin.ModelAdmin):
list_display = ('name', 'description', 'id')
admin.site.register(Executable, ExecutableAdmin)
class BenchmarkAdmin(admin.ModelAdmin):
list_display = ('name', 'benchmark_type', 'description', 'units_title', 'units', 'lessisbetter')
ordering = ['name']
admin.site.register(Benchmark, BenchmarkAdmin)
class EnvironmentAdmin(admin.ModelAdmin):
list_display = ('name', 'cpu', 'memory', 'os', 'kernel')
admin.site.register(Environment, EnvironmentAdmin)
class ResultAdmin(admin.ModelAdmin):
list_display = ('revision', 'benchmark', 'executable', 'environment', 'value', 'date', 'environment')
list_filter = ('date', 'executable', 'benchmark', 'environment')
admin.site.register(Result, ResultAdmin)
|
b8d8ac842f0c22607a32995a48180261d6a84c39
|
cumulus/management/commands/collectstatic.py
|
cumulus/management/commands/collectstatic.py
|
import hashlib
from django.contrib.staticfiles.management.commands import collectstatic
from cumulus.storage import CloudFilesStorage
class Command(collectstatic.Command):
def delete_file(self, path, prefixed_path, source_storage):
"""
Checks if the target file should be deleted if it already exists
"""
if isinstance(self.storage, CloudFilesStorage):
if self.storage.exists(prefixed_path):
try:
etag = self.storage._get_cloud_obj(prefixed_path).etag
digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest())
print etag, digest
if etag == digest:
self.log(u"Skipping '{0}' (not modified based on file hash)".format(path))
return False
except:
raise
super(Command, self).delete_file(path, prefixed_path, source_storage)
|
import hashlib
from django.contrib.staticfiles.management.commands import collectstatic
from cumulus.storage import CloudFilesStorage
class Command(collectstatic.Command):
def delete_file(self, path, prefixed_path, source_storage):
"""
Checks if the target file should be deleted if it already exists
"""
if isinstance(self.storage, CloudFilesStorage):
if self.storage.exists(prefixed_path):
try:
etag = self.storage._get_cloud_obj(prefixed_path).etag
digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest())
print etag, digest
if etag == digest:
self.log(u"Skipping '{0}' (not modified based on file hash)".format(path))
return False
except:
raise
return super(Command, self).delete_file(path, prefixed_path, source_storage)
|
Fix collecstatic command return value
|
Fix collecstatic command return value
delete_file() method should return a boolean, was missing
a return when calling super()
|
Python
|
bsd-3-clause
|
SmithsonianEnterprises/django-cumulus,absoludity/django-cumulus,bennylope/django-cumulus,absoludity/django-cumulus,ferrix/django-cumulus,django-cumulus/django-cumulus,bennylope/django-cumulus,elsmorian/django-cumulus,ImaginaryLandscape/django-cumulus,rizumu/django-cumulus,SmithsonianEnterprises/django-cumulus,CoachLogix/django-cumulus,mandx/django-cumulus,elsmorian/django-cumulus
|
import hashlib
from django.contrib.staticfiles.management.commands import collectstatic
from cumulus.storage import CloudFilesStorage
class Command(collectstatic.Command):
def delete_file(self, path, prefixed_path, source_storage):
"""
Checks if the target file should be deleted if it already exists
"""
if isinstance(self.storage, CloudFilesStorage):
if self.storage.exists(prefixed_path):
try:
etag = self.storage._get_cloud_obj(prefixed_path).etag
digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest())
print etag, digest
if etag == digest:
self.log(u"Skipping '{0}' (not modified based on file hash)".format(path))
return False
except:
raise
super(Command, self).delete_file(path, prefixed_path, source_storage)
Fix collecstatic command return value
delete_file() method should return a boolean, was missing
a return when calling super()
|
import hashlib
from django.contrib.staticfiles.management.commands import collectstatic
from cumulus.storage import CloudFilesStorage
class Command(collectstatic.Command):
def delete_file(self, path, prefixed_path, source_storage):
"""
Checks if the target file should be deleted if it already exists
"""
if isinstance(self.storage, CloudFilesStorage):
if self.storage.exists(prefixed_path):
try:
etag = self.storage._get_cloud_obj(prefixed_path).etag
digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest())
print etag, digest
if etag == digest:
self.log(u"Skipping '{0}' (not modified based on file hash)".format(path))
return False
except:
raise
return super(Command, self).delete_file(path, prefixed_path, source_storage)
|
<commit_before>import hashlib
from django.contrib.staticfiles.management.commands import collectstatic
from cumulus.storage import CloudFilesStorage
class Command(collectstatic.Command):
def delete_file(self, path, prefixed_path, source_storage):
"""
Checks if the target file should be deleted if it already exists
"""
if isinstance(self.storage, CloudFilesStorage):
if self.storage.exists(prefixed_path):
try:
etag = self.storage._get_cloud_obj(prefixed_path).etag
digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest())
print etag, digest
if etag == digest:
self.log(u"Skipping '{0}' (not modified based on file hash)".format(path))
return False
except:
raise
super(Command, self).delete_file(path, prefixed_path, source_storage)
<commit_msg>Fix collecstatic command return value
delete_file() method should return a boolean, was missing
a return when calling super()<commit_after>
|
import hashlib
from django.contrib.staticfiles.management.commands import collectstatic
from cumulus.storage import CloudFilesStorage
class Command(collectstatic.Command):
def delete_file(self, path, prefixed_path, source_storage):
"""
Checks if the target file should be deleted if it already exists
"""
if isinstance(self.storage, CloudFilesStorage):
if self.storage.exists(prefixed_path):
try:
etag = self.storage._get_cloud_obj(prefixed_path).etag
digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest())
print etag, digest
if etag == digest:
self.log(u"Skipping '{0}' (not modified based on file hash)".format(path))
return False
except:
raise
return super(Command, self).delete_file(path, prefixed_path, source_storage)
|
import hashlib
from django.contrib.staticfiles.management.commands import collectstatic
from cumulus.storage import CloudFilesStorage
class Command(collectstatic.Command):
def delete_file(self, path, prefixed_path, source_storage):
"""
Checks if the target file should be deleted if it already exists
"""
if isinstance(self.storage, CloudFilesStorage):
if self.storage.exists(prefixed_path):
try:
etag = self.storage._get_cloud_obj(prefixed_path).etag
digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest())
print etag, digest
if etag == digest:
self.log(u"Skipping '{0}' (not modified based on file hash)".format(path))
return False
except:
raise
super(Command, self).delete_file(path, prefixed_path, source_storage)
Fix collecstatic command return value
delete_file() method should return a boolean, was missing
a return when calling super()import hashlib
from django.contrib.staticfiles.management.commands import collectstatic
from cumulus.storage import CloudFilesStorage
class Command(collectstatic.Command):
def delete_file(self, path, prefixed_path, source_storage):
"""
Checks if the target file should be deleted if it already exists
"""
if isinstance(self.storage, CloudFilesStorage):
if self.storage.exists(prefixed_path):
try:
etag = self.storage._get_cloud_obj(prefixed_path).etag
digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest())
print etag, digest
if etag == digest:
self.log(u"Skipping '{0}' (not modified based on file hash)".format(path))
return False
except:
raise
return super(Command, self).delete_file(path, prefixed_path, source_storage)
|
<commit_before>import hashlib
from django.contrib.staticfiles.management.commands import collectstatic
from cumulus.storage import CloudFilesStorage
class Command(collectstatic.Command):
def delete_file(self, path, prefixed_path, source_storage):
"""
Checks if the target file should be deleted if it already exists
"""
if isinstance(self.storage, CloudFilesStorage):
if self.storage.exists(prefixed_path):
try:
etag = self.storage._get_cloud_obj(prefixed_path).etag
digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest())
print etag, digest
if etag == digest:
self.log(u"Skipping '{0}' (not modified based on file hash)".format(path))
return False
except:
raise
super(Command, self).delete_file(path, prefixed_path, source_storage)
<commit_msg>Fix collecstatic command return value
delete_file() method should return a boolean, was missing
a return when calling super()<commit_after>import hashlib
from django.contrib.staticfiles.management.commands import collectstatic
from cumulus.storage import CloudFilesStorage
class Command(collectstatic.Command):
def delete_file(self, path, prefixed_path, source_storage):
"""
Checks if the target file should be deleted if it already exists
"""
if isinstance(self.storage, CloudFilesStorage):
if self.storage.exists(prefixed_path):
try:
etag = self.storage._get_cloud_obj(prefixed_path).etag
digest = "{0}".format(hashlib.md5(source_storage.open(path).read()).hexdigest())
print etag, digest
if etag == digest:
self.log(u"Skipping '{0}' (not modified based on file hash)".format(path))
return False
except:
raise
return super(Command, self).delete_file(path, prefixed_path, source_storage)
|
4ace9edb7432b5c0de677924301477ce86480486
|
common/safeprint.py
|
common/safeprint.py
|
import multiprocessing, sys
from datetime import datetime
from common import settings
print_lock = multiprocessing.RLock()
def safeprint(msg, verbosity=0):
"""Prints in a thread-lock, taking a single object as an argument"""
string = ("[" + str(multiprocessing.current_process().pid) + "] " +
datetime.now().strftime('%H:%M:%S: ') + str(msg) + '\r\n')
with print_lock:
with open("output.txt", "a") as log:
log.write(string)
if settings.config.get('verbose') >= verbosity:
sys.stdout.write(string)
|
import multiprocessing, sys
from datetime import datetime
from common import settings
print_lock = multiprocessing.RLock()
max_digits = 0
def safeprint(msg, verbosity=0):
"""Prints in a thread-lock, taking a single object as an argument"""
pid = str(multiprocessing.current_process().pid)
max_digits = max(max_digits, len(pid))
pid = pid.zfill(max_digits)
string = ("[" + pid + "] " + datetime.now().strftime('%H:%M:%S: ') +
str(msg) + '\n')
with print_lock:
with open("output.txt", "a") as log:
log.write(string)
if settings.config.get('verbose') >= verbosity:
sys.stdout.write(string)
|
Make thread marker consistent length
|
Make thread marker consistent length
|
Python
|
mit
|
gappleto97/Senior-Project
|
import multiprocessing, sys
from datetime import datetime
from common import settings
print_lock = multiprocessing.RLock()
def safeprint(msg, verbosity=0):
"""Prints in a thread-lock, taking a single object as an argument"""
string = ("[" + str(multiprocessing.current_process().pid) + "] " +
datetime.now().strftime('%H:%M:%S: ') + str(msg) + '\r\n')
with print_lock:
with open("output.txt", "a") as log:
log.write(string)
if settings.config.get('verbose') >= verbosity:
sys.stdout.write(string)
Make thread marker consistent length
|
import multiprocessing, sys
from datetime import datetime
from common import settings
print_lock = multiprocessing.RLock()
max_digits = 0
def safeprint(msg, verbosity=0):
"""Prints in a thread-lock, taking a single object as an argument"""
pid = str(multiprocessing.current_process().pid)
max_digits = max(max_digits, len(pid))
pid = pid.zfill(max_digits)
string = ("[" + pid + "] " + datetime.now().strftime('%H:%M:%S: ') +
str(msg) + '\n')
with print_lock:
with open("output.txt", "a") as log:
log.write(string)
if settings.config.get('verbose') >= verbosity:
sys.stdout.write(string)
|
<commit_before>import multiprocessing, sys
from datetime import datetime
from common import settings
print_lock = multiprocessing.RLock()
def safeprint(msg, verbosity=0):
"""Prints in a thread-lock, taking a single object as an argument"""
string = ("[" + str(multiprocessing.current_process().pid) + "] " +
datetime.now().strftime('%H:%M:%S: ') + str(msg) + '\r\n')
with print_lock:
with open("output.txt", "a") as log:
log.write(string)
if settings.config.get('verbose') >= verbosity:
sys.stdout.write(string)
<commit_msg>Make thread marker consistent length<commit_after>
|
import multiprocessing, sys
from datetime import datetime
from common import settings
print_lock = multiprocessing.RLock()
max_digits = 0
def safeprint(msg, verbosity=0):
"""Prints in a thread-lock, taking a single object as an argument"""
pid = str(multiprocessing.current_process().pid)
max_digits = max(max_digits, len(pid))
pid = pid.zfill(max_digits)
string = ("[" + pid + "] " + datetime.now().strftime('%H:%M:%S: ') +
str(msg) + '\n')
with print_lock:
with open("output.txt", "a") as log:
log.write(string)
if settings.config.get('verbose') >= verbosity:
sys.stdout.write(string)
|
import multiprocessing, sys
from datetime import datetime
from common import settings
print_lock = multiprocessing.RLock()
def safeprint(msg, verbosity=0):
"""Prints in a thread-lock, taking a single object as an argument"""
string = ("[" + str(multiprocessing.current_process().pid) + "] " +
datetime.now().strftime('%H:%M:%S: ') + str(msg) + '\r\n')
with print_lock:
with open("output.txt", "a") as log:
log.write(string)
if settings.config.get('verbose') >= verbosity:
sys.stdout.write(string)
Make thread marker consistent lengthimport multiprocessing, sys
from datetime import datetime
from common import settings
print_lock = multiprocessing.RLock()
max_digits = 0
def safeprint(msg, verbosity=0):
"""Prints in a thread-lock, taking a single object as an argument"""
pid = str(multiprocessing.current_process().pid)
max_digits = max(max_digits, len(pid))
pid = pid.zfill(max_digits)
string = ("[" + pid + "] " + datetime.now().strftime('%H:%M:%S: ') +
str(msg) + '\n')
with print_lock:
with open("output.txt", "a") as log:
log.write(string)
if settings.config.get('verbose') >= verbosity:
sys.stdout.write(string)
|
<commit_before>import multiprocessing, sys
from datetime import datetime
from common import settings
print_lock = multiprocessing.RLock()
def safeprint(msg, verbosity=0):
"""Prints in a thread-lock, taking a single object as an argument"""
string = ("[" + str(multiprocessing.current_process().pid) + "] " +
datetime.now().strftime('%H:%M:%S: ') + str(msg) + '\r\n')
with print_lock:
with open("output.txt", "a") as log:
log.write(string)
if settings.config.get('verbose') >= verbosity:
sys.stdout.write(string)
<commit_msg>Make thread marker consistent length<commit_after>import multiprocessing, sys
from datetime import datetime
from common import settings
print_lock = multiprocessing.RLock()
max_digits = 0
def safeprint(msg, verbosity=0):
"""Prints in a thread-lock, taking a single object as an argument"""
pid = str(multiprocessing.current_process().pid)
max_digits = max(max_digits, len(pid))
pid = pid.zfill(max_digits)
string = ("[" + pid + "] " + datetime.now().strftime('%H:%M:%S: ') +
str(msg) + '\n')
with print_lock:
with open("output.txt", "a") as log:
log.write(string)
if settings.config.get('verbose') >= verbosity:
sys.stdout.write(string)
|
ac8dbe8f70061906035ea24ae6bae91f0432dca8
|
astropy/utils/setup_package.py
|
astropy/utils/setup_package.py
|
from distutils.core import Extension
from os.path import dirname, join
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[join(ROOT, 'src', 'compiler.c')])
]
|
from distutils.core import Extension
from os.path import dirname, join, relpath
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[relpath(join(ROOT, 'src', 'compiler.c'))])
]
|
Make sure to use the relative path for all C extension source files. Otherwise distuils' MSVC compiler generates some potentially long (too long for Windows) pathnames in the build\temp dir for various compiler artifacts. (This was particularly problematic in Jenkins, where having multiple configuration matrix axes can make for long path names.)
|
Make sure to use the relative path for all C extension source files. Otherwise distuils' MSVC compiler generates some potentially long (too long for Windows) pathnames in the build\temp dir for various compiler artifacts. (This was particularly problematic in Jenkins, where having multiple configuration matrix axes can make for long path names.)
|
Python
|
bsd-3-clause
|
MSeifert04/astropy,pllim/astropy,funbaker/astropy,stargaser/astropy,lpsinger/astropy,DougBurke/astropy,larrybradley/astropy,AustereCuriosity/astropy,dhomeier/astropy,saimn/astropy,mhvk/astropy,tbabej/astropy,DougBurke/astropy,kelle/astropy,AustereCuriosity/astropy,saimn/astropy,mhvk/astropy,bsipocz/astropy,funbaker/astropy,astropy/astropy,stargaser/astropy,joergdietrich/astropy,StuartLittlefair/astropy,stargaser/astropy,aleksandr-bakanov/astropy,tbabej/astropy,AustereCuriosity/astropy,saimn/astropy,larrybradley/astropy,astropy/astropy,kelle/astropy,MSeifert04/astropy,larrybradley/astropy,StuartLittlefair/astropy,lpsinger/astropy,funbaker/astropy,dhomeier/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,larrybradley/astropy,tbabej/astropy,AustereCuriosity/astropy,tbabej/astropy,pllim/astropy,tbabej/astropy,pllim/astropy,saimn/astropy,DougBurke/astropy,lpsinger/astropy,astropy/astropy,joergdietrich/astropy,lpsinger/astropy,bsipocz/astropy,MSeifert04/astropy,pllim/astropy,joergdietrich/astropy,lpsinger/astropy,kelle/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,pllim/astropy,dhomeier/astropy,mhvk/astropy,larrybradley/astropy,astropy/astropy,StuartLittlefair/astropy,bsipocz/astropy,MSeifert04/astropy,kelle/astropy,bsipocz/astropy,astropy/astropy,stargaser/astropy,funbaker/astropy,kelle/astropy,dhomeier/astropy,DougBurke/astropy,mhvk/astropy,saimn/astropy,joergdietrich/astropy,StuartLittlefair/astropy,mhvk/astropy
|
from distutils.core import Extension
from os.path import dirname, join
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[join(ROOT, 'src', 'compiler.c')])
]
Make sure to use the relative path for all C extension source files. Otherwise distuils' MSVC compiler generates some potentially long (too long for Windows) pathnames in the build\temp dir for various compiler artifacts. (This was particularly problematic in Jenkins, where having multiple configuration matrix axes can make for long path names.)
|
from distutils.core import Extension
from os.path import dirname, join, relpath
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[relpath(join(ROOT, 'src', 'compiler.c'))])
]
|
<commit_before>from distutils.core import Extension
from os.path import dirname, join
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[join(ROOT, 'src', 'compiler.c')])
]
<commit_msg>Make sure to use the relative path for all C extension source files. Otherwise distuils' MSVC compiler generates some potentially long (too long for Windows) pathnames in the build\temp dir for various compiler artifacts. (This was particularly problematic in Jenkins, where having multiple configuration matrix axes can make for long path names.)<commit_after>
|
from distutils.core import Extension
from os.path import dirname, join, relpath
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[relpath(join(ROOT, 'src', 'compiler.c'))])
]
|
from distutils.core import Extension
from os.path import dirname, join
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[join(ROOT, 'src', 'compiler.c')])
]
Make sure to use the relative path for all C extension source files. Otherwise distuils' MSVC compiler generates some potentially long (too long for Windows) pathnames in the build\temp dir for various compiler artifacts. (This was particularly problematic in Jenkins, where having multiple configuration matrix axes can make for long path names.)from distutils.core import Extension
from os.path import dirname, join, relpath
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[relpath(join(ROOT, 'src', 'compiler.c'))])
]
|
<commit_before>from distutils.core import Extension
from os.path import dirname, join
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[join(ROOT, 'src', 'compiler.c')])
]
<commit_msg>Make sure to use the relative path for all C extension source files. Otherwise distuils' MSVC compiler generates some potentially long (too long for Windows) pathnames in the build\temp dir for various compiler artifacts. (This was particularly problematic in Jenkins, where having multiple configuration matrix axes can make for long path names.)<commit_after>from distutils.core import Extension
from os.path import dirname, join, relpath
def get_extensions():
ROOT = dirname(__file__)
return [
Extension('astropy.utils._compiler',
[relpath(join(ROOT, 'src', 'compiler.c'))])
]
|
bab9464394c91f63786c50080292347339aa122a
|
bonemapy_version.py
|
bonemapy_version.py
|
__version__ = '0.2.2'
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Michael Hogg
# This file is part of bonemapy - See LICENSE.txt for information on usage and redistribution
__version__ = '0.2.2'
|
Add license + redist info to version file
|
Add license + redist info to version file
|
Python
|
mit
|
mhogg/bonemapy
|
__version__ = '0.2.2'Add license + redist info to version file
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Michael Hogg
# This file is part of bonemapy - See LICENSE.txt for information on usage and redistribution
__version__ = '0.2.2'
|
<commit_before>
__version__ = '0.2.2'<commit_msg>Add license + redist info to version file<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Michael Hogg
# This file is part of bonemapy - See LICENSE.txt for information on usage and redistribution
__version__ = '0.2.2'
|
__version__ = '0.2.2'Add license + redist info to version file# -*- coding: utf-8 -*-
# Copyright (C) 2013 Michael Hogg
# This file is part of bonemapy - See LICENSE.txt for information on usage and redistribution
__version__ = '0.2.2'
|
<commit_before>
__version__ = '0.2.2'<commit_msg>Add license + redist info to version file<commit_after># -*- coding: utf-8 -*-
# Copyright (C) 2013 Michael Hogg
# This file is part of bonemapy - See LICENSE.txt for information on usage and redistribution
__version__ = '0.2.2'
|
6d816ac65cd26601440876295cf70955f172d6d0
|
organizer/models.py
|
organizer/models.py
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(
max_length=31, db_index=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
description = models.TextField()
founded_date = models.DateField(
'date founded')
contact = models.EmailField()
website = models.URLField(max_length=255)
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
|
Add options to Startup model fields.
|
Ch03: Add options to Startup model fields. [skip ci]
Field options allow us to easily customize behavior of a field.
Global Field Options:
https://docs.djangoproject.com/en/1.8/ref/models/fields/#db-index
https://docs.djangoproject.com/en/1.8/ref/models/fields/#help-text
https://docs.djangoproject.com/en/1.8/ref/models/fields/#unique
https://docs.djangoproject.com/en/1.8/ref/models/fields/#verbose-name
Verbose names are further documented in:
https://docs.djangoproject.com/en/1.8/topics/db/models/#verbose-field-names
The max_length field option is defined in CharField and inherited by all
CharField subclasses (but is typically optional in these subclasses,
unlike CharField itself).
The 255 character limit of the URLField is based on RFC 3986.
https://tools.ietf.org/html/rfc3986
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
Ch03: Add options to Startup model fields. [skip ci]
Field options allow us to easily customize behavior of a field.
Global Field Options:
https://docs.djangoproject.com/en/1.8/ref/models/fields/#db-index
https://docs.djangoproject.com/en/1.8/ref/models/fields/#help-text
https://docs.djangoproject.com/en/1.8/ref/models/fields/#unique
https://docs.djangoproject.com/en/1.8/ref/models/fields/#verbose-name
Verbose names are further documented in:
https://docs.djangoproject.com/en/1.8/topics/db/models/#verbose-field-names
The max_length field option is defined in CharField and inherited by all
CharField subclasses (but is typically optional in these subclasses,
unlike CharField itself).
The 255 character limit of the URLField is based on RFC 3986.
https://tools.ietf.org/html/rfc3986
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(
max_length=31, db_index=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
description = models.TextField()
founded_date = models.DateField(
'date founded')
contact = models.EmailField()
website = models.URLField(max_length=255)
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
|
<commit_before>from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
<commit_msg>Ch03: Add options to Startup model fields. [skip ci]
Field options allow us to easily customize behavior of a field.
Global Field Options:
https://docs.djangoproject.com/en/1.8/ref/models/fields/#db-index
https://docs.djangoproject.com/en/1.8/ref/models/fields/#help-text
https://docs.djangoproject.com/en/1.8/ref/models/fields/#unique
https://docs.djangoproject.com/en/1.8/ref/models/fields/#verbose-name
Verbose names are further documented in:
https://docs.djangoproject.com/en/1.8/topics/db/models/#verbose-field-names
The max_length field option is defined in CharField and inherited by all
CharField subclasses (but is typically optional in these subclasses,
unlike CharField itself).
The 255 character limit of the URLField is based on RFC 3986.
https://tools.ietf.org/html/rfc3986<commit_after>
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(
max_length=31, db_index=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
description = models.TextField()
founded_date = models.DateField(
'date founded')
contact = models.EmailField()
website = models.URLField(max_length=255)
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
|
from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
Ch03: Add options to Startup model fields. [skip ci]
Field options allow us to easily customize behavior of a field.
Global Field Options:
https://docs.djangoproject.com/en/1.8/ref/models/fields/#db-index
https://docs.djangoproject.com/en/1.8/ref/models/fields/#help-text
https://docs.djangoproject.com/en/1.8/ref/models/fields/#unique
https://docs.djangoproject.com/en/1.8/ref/models/fields/#verbose-name
Verbose names are further documented in:
https://docs.djangoproject.com/en/1.8/topics/db/models/#verbose-field-names
The max_length field option is defined in CharField and inherited by all
CharField subclasses (but is typically optional in these subclasses,
unlike CharField itself).
The 255 character limit of the URLField is based on RFC 3986.
https://tools.ietf.org/html/rfc3986from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(
max_length=31, db_index=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
description = models.TextField()
founded_date = models.DateField(
'date founded')
contact = models.EmailField()
website = models.URLField(max_length=255)
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
|
<commit_before>from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(max_length=31)
slug = models.SlugField()
description = models.TextField()
founded_date = models.DateField()
contact = models.EmailField()
website = models.URLField()
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
<commit_msg>Ch03: Add options to Startup model fields. [skip ci]
Field options allow us to easily customize behavior of a field.
Global Field Options:
https://docs.djangoproject.com/en/1.8/ref/models/fields/#db-index
https://docs.djangoproject.com/en/1.8/ref/models/fields/#help-text
https://docs.djangoproject.com/en/1.8/ref/models/fields/#unique
https://docs.djangoproject.com/en/1.8/ref/models/fields/#verbose-name
Verbose names are further documented in:
https://docs.djangoproject.com/en/1.8/topics/db/models/#verbose-field-names
The max_length field option is defined in CharField and inherited by all
CharField subclasses (but is typically optional in these subclasses,
unlike CharField itself).
The 255 character limit of the URLField is based on RFC 3986.
https://tools.ietf.org/html/rfc3986<commit_after>from django.db import models
# Model Field Reference
# https://docs.djangoproject.com/en/1.8/ref/models/fields/
class Tag(models.Model):
name = models.CharField(
max_length=31, unique=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
class Startup(models.Model):
name = models.CharField(
max_length=31, db_index=True)
slug = models.SlugField(
max_length=31,
unique=True,
help_text='A label for URL config.')
description = models.TextField()
founded_date = models.DateField(
'date founded')
contact = models.EmailField()
website = models.URLField(max_length=255)
tags = models.ManyToManyField(Tag)
class NewsLink(models.Model):
title = models.CharField(max_length=63)
pub_date = models.DateField('date published')
link = models.URLField(max_length=255)
startup = models.ForeignKey(Startup)
|
58eb2f88821f7284c744838725351ddff67dd7f4
|
website/views.py
|
website/views.py
|
from django.template import RequestContext
from website.models import Restaurant, OpenTime, BaseModel
from website.api import export_data
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.views.decorators.http import condition
import hashlib
import json
def restaurant_grid(request):
"""Display the restaurants in a grid. Main page."""
if 'sort' in request.GET:
if request.GET['sort'] == 'location':
# Display the grid by location (instead of listing alphabetically)
pass # Not implemented yet
return render_to_response('restaurant_grid.html',
context_instance=RequestContext(request))
def gen_etag(request):
return hashlib.sha1(str(OpenTime.objects.all())).hexdigest()
def gen_last_modified(request):
return BaseModel.objects.all().order_by('-last_modified')[0].last_modified
@condition(etag_func=gen_etag, last_modified_func=gen_last_modified)
def ajax_schedule_data(request):
# Wrapping up in an object to avoid possible CSRF attack on top-level
# arrays in JSON objects
return HttpResponse(json.dumps({'data': export_data()}),
content_type="application/json")
|
from django.template import RequestContext
from website.models import Restaurant, OpenTime, BaseModel
from website.api import export_data
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.views.decorators.http import condition
import hashlib
import json
def restaurant_grid(request):
"""Display the restaurants in a grid. Main page."""
if 'sort' in request.GET:
if request.GET['sort'] == 'location':
# Display the grid by location (instead of listing alphabetically)
pass # Not implemented yet
return render_to_response('restaurant_grid.html',
context_instance=RequestContext(request))
def gen_etag(request):
return hashlib.sha1(str(OpenTime.objects.all())).hexdigest()
def gen_last_modified(request):
return BaseModel.objects.all().order_by('-last_modified')[0].last_modified
@condition(etag_func=gen_etag, last_modified_func=gen_last_modified)
def ajax_schedule_data(request):
# Wrapping up in an object to avoid possible CSRF attack on top-level
# arrays in JSON objects
return HttpResponse(json.dumps({'data': export_data()}, indent=4),
content_type="application/json")
|
Add pretty printing for API
|
Add pretty printing for API
|
Python
|
apache-2.0
|
srct/whats-open,srct/whats-open,srct/whats-open
|
from django.template import RequestContext
from website.models import Restaurant, OpenTime, BaseModel
from website.api import export_data
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.views.decorators.http import condition
import hashlib
import json
def restaurant_grid(request):
"""Display the restaurants in a grid. Main page."""
if 'sort' in request.GET:
if request.GET['sort'] == 'location':
# Display the grid by location (instead of listing alphabetically)
pass # Not implemented yet
return render_to_response('restaurant_grid.html',
context_instance=RequestContext(request))
def gen_etag(request):
return hashlib.sha1(str(OpenTime.objects.all())).hexdigest()
def gen_last_modified(request):
return BaseModel.objects.all().order_by('-last_modified')[0].last_modified
@condition(etag_func=gen_etag, last_modified_func=gen_last_modified)
def ajax_schedule_data(request):
# Wrapping up in an object to avoid possible CSRF attack on top-level
# arrays in JSON objects
return HttpResponse(json.dumps({'data': export_data()}),
content_type="application/json")
Add pretty printing for API
|
from django.template import RequestContext
from website.models import Restaurant, OpenTime, BaseModel
from website.api import export_data
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.views.decorators.http import condition
import hashlib
import json
def restaurant_grid(request):
"""Display the restaurants in a grid. Main page."""
if 'sort' in request.GET:
if request.GET['sort'] == 'location':
# Display the grid by location (instead of listing alphabetically)
pass # Not implemented yet
return render_to_response('restaurant_grid.html',
context_instance=RequestContext(request))
def gen_etag(request):
return hashlib.sha1(str(OpenTime.objects.all())).hexdigest()
def gen_last_modified(request):
return BaseModel.objects.all().order_by('-last_modified')[0].last_modified
@condition(etag_func=gen_etag, last_modified_func=gen_last_modified)
def ajax_schedule_data(request):
# Wrapping up in an object to avoid possible CSRF attack on top-level
# arrays in JSON objects
return HttpResponse(json.dumps({'data': export_data()}, indent=4),
content_type="application/json")
|
<commit_before>from django.template import RequestContext
from website.models import Restaurant, OpenTime, BaseModel
from website.api import export_data
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.views.decorators.http import condition
import hashlib
import json
def restaurant_grid(request):
"""Display the restaurants in a grid. Main page."""
if 'sort' in request.GET:
if request.GET['sort'] == 'location':
# Display the grid by location (instead of listing alphabetically)
pass # Not implemented yet
return render_to_response('restaurant_grid.html',
context_instance=RequestContext(request))
def gen_etag(request):
return hashlib.sha1(str(OpenTime.objects.all())).hexdigest()
def gen_last_modified(request):
return BaseModel.objects.all().order_by('-last_modified')[0].last_modified
@condition(etag_func=gen_etag, last_modified_func=gen_last_modified)
def ajax_schedule_data(request):
# Wrapping up in an object to avoid possible CSRF attack on top-level
# arrays in JSON objects
return HttpResponse(json.dumps({'data': export_data()}),
content_type="application/json")
<commit_msg>Add pretty printing for API<commit_after>
|
from django.template import RequestContext
from website.models import Restaurant, OpenTime, BaseModel
from website.api import export_data
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.views.decorators.http import condition
import hashlib
import json
def restaurant_grid(request):
"""Display the restaurants in a grid. Main page."""
if 'sort' in request.GET:
if request.GET['sort'] == 'location':
# Display the grid by location (instead of listing alphabetically)
pass # Not implemented yet
return render_to_response('restaurant_grid.html',
context_instance=RequestContext(request))
def gen_etag(request):
return hashlib.sha1(str(OpenTime.objects.all())).hexdigest()
def gen_last_modified(request):
return BaseModel.objects.all().order_by('-last_modified')[0].last_modified
@condition(etag_func=gen_etag, last_modified_func=gen_last_modified)
def ajax_schedule_data(request):
# Wrapping up in an object to avoid possible CSRF attack on top-level
# arrays in JSON objects
return HttpResponse(json.dumps({'data': export_data()}, indent=4),
content_type="application/json")
|
from django.template import RequestContext
from website.models import Restaurant, OpenTime, BaseModel
from website.api import export_data
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.views.decorators.http import condition
import hashlib
import json
def restaurant_grid(request):
"""Display the restaurants in a grid. Main page."""
if 'sort' in request.GET:
if request.GET['sort'] == 'location':
# Display the grid by location (instead of listing alphabetically)
pass # Not implemented yet
return render_to_response('restaurant_grid.html',
context_instance=RequestContext(request))
def gen_etag(request):
return hashlib.sha1(str(OpenTime.objects.all())).hexdigest()
def gen_last_modified(request):
return BaseModel.objects.all().order_by('-last_modified')[0].last_modified
@condition(etag_func=gen_etag, last_modified_func=gen_last_modified)
def ajax_schedule_data(request):
# Wrapping up in an object to avoid possible CSRF attack on top-level
# arrays in JSON objects
return HttpResponse(json.dumps({'data': export_data()}),
content_type="application/json")
Add pretty printing for APIfrom django.template import RequestContext
from website.models import Restaurant, OpenTime, BaseModel
from website.api import export_data
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.views.decorators.http import condition
import hashlib
import json
def restaurant_grid(request):
"""Display the restaurants in a grid. Main page."""
if 'sort' in request.GET:
if request.GET['sort'] == 'location':
# Display the grid by location (instead of listing alphabetically)
pass # Not implemented yet
return render_to_response('restaurant_grid.html',
context_instance=RequestContext(request))
def gen_etag(request):
return hashlib.sha1(str(OpenTime.objects.all())).hexdigest()
def gen_last_modified(request):
return BaseModel.objects.all().order_by('-last_modified')[0].last_modified
@condition(etag_func=gen_etag, last_modified_func=gen_last_modified)
def ajax_schedule_data(request):
# Wrapping up in an object to avoid possible CSRF attack on top-level
# arrays in JSON objects
return HttpResponse(json.dumps({'data': export_data()}, indent=4),
content_type="application/json")
|
<commit_before>from django.template import RequestContext
from website.models import Restaurant, OpenTime, BaseModel
from website.api import export_data
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.views.decorators.http import condition
import hashlib
import json
def restaurant_grid(request):
"""Display the restaurants in a grid. Main page."""
if 'sort' in request.GET:
if request.GET['sort'] == 'location':
# Display the grid by location (instead of listing alphabetically)
pass # Not implemented yet
return render_to_response('restaurant_grid.html',
context_instance=RequestContext(request))
def gen_etag(request):
return hashlib.sha1(str(OpenTime.objects.all())).hexdigest()
def gen_last_modified(request):
return BaseModel.objects.all().order_by('-last_modified')[0].last_modified
@condition(etag_func=gen_etag, last_modified_func=gen_last_modified)
def ajax_schedule_data(request):
# Wrapping up in an object to avoid possible CSRF attack on top-level
# arrays in JSON objects
return HttpResponse(json.dumps({'data': export_data()}),
content_type="application/json")
<commit_msg>Add pretty printing for API<commit_after>from django.template import RequestContext
from website.models import Restaurant, OpenTime, BaseModel
from website.api import export_data
from django.shortcuts import render_to_response
from django.http import HttpResponse
from django.views.decorators.http import condition
import hashlib
import json
def restaurant_grid(request):
"""Display the restaurants in a grid. Main page."""
if 'sort' in request.GET:
if request.GET['sort'] == 'location':
# Display the grid by location (instead of listing alphabetically)
pass # Not implemented yet
return render_to_response('restaurant_grid.html',
context_instance=RequestContext(request))
def gen_etag(request):
return hashlib.sha1(str(OpenTime.objects.all())).hexdigest()
def gen_last_modified(request):
return BaseModel.objects.all().order_by('-last_modified')[0].last_modified
@condition(etag_func=gen_etag, last_modified_func=gen_last_modified)
def ajax_schedule_data(request):
# Wrapping up in an object to avoid possible CSRF attack on top-level
# arrays in JSON objects
return HttpResponse(json.dumps({'data': export_data()}, indent=4),
content_type="application/json")
|
b56a198cd58787dff5be1f7fa476e0d75fd17e88
|
notifications/upcoming_match.py
|
notifications/upcoming_match.py
|
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
data['message_data']['scheduled_time'] = self.match.time
data['message_data']['predicted_time'] = self.match.time # TODO Add in some time predictions
return data
def render(self, client_types):
super(UpcomingMatchNotification, self).render(client_types)
self.match.push_sent = True
self.match.put()
|
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
data['message_data']['scheduled_time'] = self.match.time
data['message_data']['predicted_time'] = self.match.time # TODO Add in some time predictions
return data
def render(self, client_types):
super(UpcomingMatchNotification, self).render(client_types)
self.match.push_sent = True
self.match.updated = datetime.datetime.now()
self.match.put()
|
Update match modified time when we mark push as sent
|
Update match modified time when we mark push as sent
|
Python
|
mit
|
synth3tk/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,verycumbersome/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,1fish2/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,1fish2/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-b
lue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance
|
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
data['message_data']['scheduled_time'] = self.match.time
data['message_data']['predicted_time'] = self.match.time # TODO Add in some time predictions
return data
def render(self, client_types):
super(UpcomingMatchNotification, self).render(client_types)
self.match.push_sent = True
self.match.put()
Update match modified time when we mark push as sent
|
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
data['message_data']['scheduled_time'] = self.match.time
data['message_data']['predicted_time'] = self.match.time # TODO Add in some time predictions
return data
def render(self, client_types):
super(UpcomingMatchNotification, self).render(client_types)
self.match.push_sent = True
self.match.updated = datetime.datetime.now()
self.match.put()
|
<commit_before>from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
data['message_data']['scheduled_time'] = self.match.time
data['message_data']['predicted_time'] = self.match.time # TODO Add in some time predictions
return data
def render(self, client_types):
super(UpcomingMatchNotification, self).render(client_types)
self.match.push_sent = True
self.match.put()
<commit_msg>Update match modified time when we mark push as sent<commit_after>
|
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
data['message_data']['scheduled_time'] = self.match.time
data['message_data']['predicted_time'] = self.match.time # TODO Add in some time predictions
return data
def render(self, client_types):
super(UpcomingMatchNotification, self).render(client_types)
self.match.push_sent = True
self.match.updated = datetime.datetime.now()
self.match.put()
|
from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
data['message_data']['scheduled_time'] = self.match.time
data['message_data']['predicted_time'] = self.match.time # TODO Add in some time predictions
return data
def render(self, client_types):
super(UpcomingMatchNotification, self).render(client_types)
self.match.push_sent = True
self.match.put()
Update match modified time when we mark push as sentfrom consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
data['message_data']['scheduled_time'] = self.match.time
data['message_data']['predicted_time'] = self.match.time # TODO Add in some time predictions
return data
def render(self, client_types):
super(UpcomingMatchNotification, self).render(client_types)
self.match.push_sent = True
self.match.updated = datetime.datetime.now()
self.match.put()
|
<commit_before>from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
data['message_data']['scheduled_time'] = self.match.time
data['message_data']['predicted_time'] = self.match.time # TODO Add in some time predictions
return data
def render(self, client_types):
super(UpcomingMatchNotification, self).render(client_types)
self.match.push_sent = True
self.match.put()
<commit_msg>Update match modified time when we mark push as sent<commit_after>from consts.notification_type import NotificationType
from notifications.base_notification import BaseNotification
class UpcomingMatchNotification(BaseNotification):
def __init__(self, match, event):
self.match = match
self.event = event
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[NotificationType.UPCOMING_MATCH]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match_key'] = self.match.key_name
data['message_data']['team_keys'] = self.match.team_key_names
data['message_data']['scheduled_time'] = self.match.time
data['message_data']['predicted_time'] = self.match.time # TODO Add in some time predictions
return data
def render(self, client_types):
super(UpcomingMatchNotification, self).render(client_types)
self.match.push_sent = True
self.match.updated = datetime.datetime.now()
self.match.put()
|
30c019447bdf99589383a50b9b68d5ae2dcc76a8
|
test/inventory_api.py
|
test/inventory_api.py
|
#!/usr/bin/env python
import json
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
systems = {
"ungouped": [ "jupiter", "saturn" ],
"greek": [ "zeus", "hera", "poseidon" ],
"norse": [ "thor", "odin", "loki" ]
}
variables = {
"thor": {
"hammer": True
}
}
if options.list_hosts == True:
print json.dumps(systems)
sys.exit(0)
if options.host is not None:
if options.extra:
k,v = options.extra.split("=")
variables[options.host][k] = v
print json.dumps(variables[options.host])
sys.exit(0)
parser.print_help()
sys.exit(1)
|
#!/usr/bin/env python
import json
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
systems = {
"ungrouped": [ "jupiter", "saturn" ],
"greek": [ "zeus", "hera", "poseidon" ],
"norse": [ "thor", "odin", "loki" ]
}
variables = {
"thor": {
"hammer": True
}
}
if options.list_hosts == True:
print json.dumps(systems)
sys.exit(0)
if options.host is not None:
if options.extra:
k,v = options.extra.split("=")
variables[options.host][k] = v
print json.dumps(variables[options.host])
sys.exit(0)
parser.print_help()
sys.exit(1)
|
Fix typo in inventory API test script.
|
Fix typo in inventory API test script.
|
Python
|
mit
|
thaim/ansible,thaim/ansible
|
#!/usr/bin/env python
import json
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
systems = {
"ungouped": [ "jupiter", "saturn" ],
"greek": [ "zeus", "hera", "poseidon" ],
"norse": [ "thor", "odin", "loki" ]
}
variables = {
"thor": {
"hammer": True
}
}
if options.list_hosts == True:
print json.dumps(systems)
sys.exit(0)
if options.host is not None:
if options.extra:
k,v = options.extra.split("=")
variables[options.host][k] = v
print json.dumps(variables[options.host])
sys.exit(0)
parser.print_help()
sys.exit(1)Fix typo in inventory API test script.
|
#!/usr/bin/env python
import json
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
systems = {
"ungrouped": [ "jupiter", "saturn" ],
"greek": [ "zeus", "hera", "poseidon" ],
"norse": [ "thor", "odin", "loki" ]
}
variables = {
"thor": {
"hammer": True
}
}
if options.list_hosts == True:
print json.dumps(systems)
sys.exit(0)
if options.host is not None:
if options.extra:
k,v = options.extra.split("=")
variables[options.host][k] = v
print json.dumps(variables[options.host])
sys.exit(0)
parser.print_help()
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
import json
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
systems = {
"ungouped": [ "jupiter", "saturn" ],
"greek": [ "zeus", "hera", "poseidon" ],
"norse": [ "thor", "odin", "loki" ]
}
variables = {
"thor": {
"hammer": True
}
}
if options.list_hosts == True:
print json.dumps(systems)
sys.exit(0)
if options.host is not None:
if options.extra:
k,v = options.extra.split("=")
variables[options.host][k] = v
print json.dumps(variables[options.host])
sys.exit(0)
parser.print_help()
sys.exit(1)<commit_msg>Fix typo in inventory API test script.<commit_after>
|
#!/usr/bin/env python
import json
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
systems = {
"ungrouped": [ "jupiter", "saturn" ],
"greek": [ "zeus", "hera", "poseidon" ],
"norse": [ "thor", "odin", "loki" ]
}
variables = {
"thor": {
"hammer": True
}
}
if options.list_hosts == True:
print json.dumps(systems)
sys.exit(0)
if options.host is not None:
if options.extra:
k,v = options.extra.split("=")
variables[options.host][k] = v
print json.dumps(variables[options.host])
sys.exit(0)
parser.print_help()
sys.exit(1)
|
#!/usr/bin/env python
import json
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
systems = {
"ungouped": [ "jupiter", "saturn" ],
"greek": [ "zeus", "hera", "poseidon" ],
"norse": [ "thor", "odin", "loki" ]
}
variables = {
"thor": {
"hammer": True
}
}
if options.list_hosts == True:
print json.dumps(systems)
sys.exit(0)
if options.host is not None:
if options.extra:
k,v = options.extra.split("=")
variables[options.host][k] = v
print json.dumps(variables[options.host])
sys.exit(0)
parser.print_help()
sys.exit(1)Fix typo in inventory API test script.#!/usr/bin/env python
import json
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
systems = {
"ungrouped": [ "jupiter", "saturn" ],
"greek": [ "zeus", "hera", "poseidon" ],
"norse": [ "thor", "odin", "loki" ]
}
variables = {
"thor": {
"hammer": True
}
}
if options.list_hosts == True:
print json.dumps(systems)
sys.exit(0)
if options.host is not None:
if options.extra:
k,v = options.extra.split("=")
variables[options.host][k] = v
print json.dumps(variables[options.host])
sys.exit(0)
parser.print_help()
sys.exit(1)
|
<commit_before>#!/usr/bin/env python
import json
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
systems = {
"ungouped": [ "jupiter", "saturn" ],
"greek": [ "zeus", "hera", "poseidon" ],
"norse": [ "thor", "odin", "loki" ]
}
variables = {
"thor": {
"hammer": True
}
}
if options.list_hosts == True:
print json.dumps(systems)
sys.exit(0)
if options.host is not None:
if options.extra:
k,v = options.extra.split("=")
variables[options.host][k] = v
print json.dumps(variables[options.host])
sys.exit(0)
parser.print_help()
sys.exit(1)<commit_msg>Fix typo in inventory API test script.<commit_after>#!/usr/bin/env python
import json
import sys
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-l', '--list', default=False, dest="list_hosts", action="store_true")
parser.add_option('-H', '--host', default=None, dest="host")
parser.add_option('-e', '--extra-vars', default=None, dest="extra")
options, args = parser.parse_args()
systems = {
"ungrouped": [ "jupiter", "saturn" ],
"greek": [ "zeus", "hera", "poseidon" ],
"norse": [ "thor", "odin", "loki" ]
}
variables = {
"thor": {
"hammer": True
}
}
if options.list_hosts == True:
print json.dumps(systems)
sys.exit(0)
if options.host is not None:
if options.extra:
k,v = options.extra.split("=")
variables[options.host][k] = v
print json.dumps(variables[options.host])
sys.exit(0)
parser.print_help()
sys.exit(1)
|
02b08ce32a17d62bc54d648e2fe53282aa38651e
|
core/build/views.py
|
core/build/views.py
|
from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
    """Return the subnet build output for one Network.

    ``?raw=...`` in the query string selects a plain-text HTTP response;
    otherwise the output is rendered into the sample_build template.
    """
    network = get_object_or_404(Network, pk=network_pk)
    # NOTE(review): request.GET is normally an immutable QueryDict --
    # ``.pop()`` on it may raise rather than behave like dict.pop; verify
    # (a sibling version of this view uses ``.get('raw')`` instead).
    if request.GET.pop("raw", False):
        DEBUG_BUILD_STRING = build_subnet(network, raw=True)
        return HttpResponse(DEBUG_BUILD_STRING)
    else:
        DEBUG_BUILD_STRING = build_subnet(network, raw=False)
        return render_to_response('build/sample_build.html',
                {'data': DEBUG_BUILD_STRING, 'network': network})
|
from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
    """Return the subnet build output for one Network.

    A truthy ``?raw=...`` query parameter selects a plain-text HTTP
    response; otherwise the output is rendered into the sample_build
    template.  404s when the Network pk does not exist.
    """
    network = get_object_or_404(Network, pk=network_pk)
    # ``.get`` (not ``.pop``) keeps the immutable QueryDict untouched.
    if request.GET.get('raw'):
        DEBUG_BUILD_STRING = build_subnet(network, raw=True)
        return HttpResponse(DEBUG_BUILD_STRING)
    else:
        DEBUG_BUILD_STRING = build_subnet(network, raw=False)
        return render_to_response('build/sample_build.html',
                {'data': DEBUG_BUILD_STRING, 'network': network})
|
Revert "use pop instead of get because it doens't cause uncaught exceptions."
|
Revert "use pop instead of get because it doens't cause uncaught exceptions."
This reverts commit 7aa3e4128b9df890a2683faee0ebe2ee8e64ce33.
|
Python
|
bsd-3-clause
|
rtucker-mozilla/mozilla_inventory,rtucker-mozilla/mozilla_inventory,mozilla/inventory,mozilla/inventory,rtucker-mozilla/mozilla_inventory,rtucker-mozilla/mozilla_inventory,mozilla/inventory,rtucker-mozilla/inventory,rtucker-mozilla/inventory,rtucker-mozilla/inventory,rtucker-mozilla/inventory,rtucker-mozilla/mozilla_inventory,mozilla/inventory,mozilla/inventory,rtucker-mozilla/inventory
|
from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
if request.GET.pop("raw", False):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
DEBUG_BUILD_STRING = build_subnet(network, raw=False)
return render_to_response('build/sample_build.html',
{'data': DEBUG_BUILD_STRING, 'network': network})
Revert "use pop instead of get because it doens't cause uncaught exceptions."
This reverts commit 7aa3e4128b9df890a2683faee0ebe2ee8e64ce33.
|
from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
if request.GET.get('raw'):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
DEBUG_BUILD_STRING = build_subnet(network, raw=False)
return render_to_response('build/sample_build.html',
{'data': DEBUG_BUILD_STRING, 'network': network})
|
<commit_before>from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
if request.GET.pop("raw", False):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
DEBUG_BUILD_STRING = build_subnet(network, raw=False)
return render_to_response('build/sample_build.html',
{'data': DEBUG_BUILD_STRING, 'network': network})
<commit_msg>Revert "use pop instead of get because it doens't cause uncaught exceptions."
This reverts commit 7aa3e4128b9df890a2683faee0ebe2ee8e64ce33.<commit_after>
|
from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
if request.GET.get('raw'):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
DEBUG_BUILD_STRING = build_subnet(network, raw=False)
return render_to_response('build/sample_build.html',
{'data': DEBUG_BUILD_STRING, 'network': network})
|
from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
if request.GET.pop("raw", False):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
DEBUG_BUILD_STRING = build_subnet(network, raw=False)
return render_to_response('build/sample_build.html',
{'data': DEBUG_BUILD_STRING, 'network': network})
Revert "use pop instead of get because it doens't cause uncaught exceptions."
This reverts commit 7aa3e4128b9df890a2683faee0ebe2ee8e64ce33.from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
if request.GET.get('raw'):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
DEBUG_BUILD_STRING = build_subnet(network, raw=False)
return render_to_response('build/sample_build.html',
{'data': DEBUG_BUILD_STRING, 'network': network})
|
<commit_before>from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
if request.GET.pop("raw", False):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
DEBUG_BUILD_STRING = build_subnet(network, raw=False)
return render_to_response('build/sample_build.html',
{'data': DEBUG_BUILD_STRING, 'network': network})
<commit_msg>Revert "use pop instead of get because it doens't cause uncaught exceptions."
This reverts commit 7aa3e4128b9df890a2683faee0ebe2ee8e64ce33.<commit_after>from core.build.subnet import build_subnet
from core.network.models import Network
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse
import pdb
def build_network(request, network_pk):
network = get_object_or_404(Network, pk=network_pk)
if request.GET.get('raw'):
DEBUG_BUILD_STRING = build_subnet(network, raw=True)
return HttpResponse(DEBUG_BUILD_STRING)
else:
DEBUG_BUILD_STRING = build_subnet(network, raw=False)
return render_to_response('build/sample_build.html',
{'data': DEBUG_BUILD_STRING, 'network': network})
|
50783ead4aab7dc2ea32b0045ff12a0bacf2b21d
|
challenge/models.py
|
challenge/models.py
|
from datetime import datetime, timedelta
from django.db import models
CHALLENGE_STATUS_CHOICES = (
('NW', 'New'),
('IP', 'In Progress'),
('CP', 'Completed'),
)
class Challenge(models.Model):
title = models.CharField('Challenge Title', max_length = 200)
starts = models.DateTimeField('Start Date', default = datetime.now)
ends = models.DateTimeField('End Date', default = lambda: datetime.now() +
timedelta(days=7))
status = models.CharField('Challenge Status', max_length = 2,
choices=CHALLENGE_STATUS_CHOICES)
summary = models.TextField('Challenge Details')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return "/challenge/%i/" % self.id
|
from datetime import datetime, timedelta
from django.db import models
from django.core.urlresolvers import reverse
CHALLENGE_STATUS_CHOICES = (
('NW', 'New'),
('IP', 'In Progress'),
('CP', 'Completed'),
)
class Challenge(models.Model):
    """A time-boxed challenge with a title, date window, status and summary."""
    title = models.CharField('Challenge Title', max_length = 200)
    # ``starts`` defaults to creation time; ``ends`` defaults to one week
    # later -- the lambda is evaluated per instance, not at class
    # definition time.
    starts = models.DateTimeField('Start Date', default = datetime.now)
    ends = models.DateTimeField('End Date', default = lambda: datetime.now() +
        timedelta(days=7))
    # Two-letter code constrained by CHALLENGE_STATUS_CHOICES (NW/IP/CP).
    status = models.CharField('Challenge Status', max_length = 2,
        choices=CHALLENGE_STATUS_CHOICES)
    summary = models.TextField('Challenge Details')
    def __unicode__(self):
        # Python 2 display representation.
        return self.title
    def get_absolute_url(self):
        # Resolve through the named URL pattern rather than hardcoding
        # the "/challenge/<id>/" path.
        return reverse('challenge-details', args=[self.id,])
|
Use reverse() instead of a hardcoded url for Challenge
|
Use reverse() instead of a hardcoded url for Challenge
Instead of hardcoding the url in Challenge.get_absolute_url(), use
reverse().
Signed-off-by: Sebastian Nowicki <72cef2e23c539c1a2a17e6651ec7265f998b0d23@gmail.com>
|
Python
|
bsd-3-clause
|
wraithan/archcode
|
from datetime import datetime, timedelta
from django.db import models
CHALLENGE_STATUS_CHOICES = (
('NW', 'New'),
('IP', 'In Progress'),
('CP', 'Completed'),
)
class Challenge(models.Model):
title = models.CharField('Challenge Title', max_length = 200)
starts = models.DateTimeField('Start Date', default = datetime.now)
ends = models.DateTimeField('End Date', default = lambda: datetime.now() +
timedelta(days=7))
status = models.CharField('Challenge Status', max_length = 2,
choices=CHALLENGE_STATUS_CHOICES)
summary = models.TextField('Challenge Details')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return "/challenge/%i/" % self.id
Use reverse() instead of a hardcoded url for Challenge
Instead of hardcoding the url in Challenge.get_absolute_url(), use
reverse().
Signed-off-by: Sebastian Nowicki <72cef2e23c539c1a2a17e6651ec7265f998b0d23@gmail.com>
|
from datetime import datetime, timedelta
from django.db import models
from django.core.urlresolvers import reverse
CHALLENGE_STATUS_CHOICES = (
('NW', 'New'),
('IP', 'In Progress'),
('CP', 'Completed'),
)
class Challenge(models.Model):
title = models.CharField('Challenge Title', max_length = 200)
starts = models.DateTimeField('Start Date', default = datetime.now)
ends = models.DateTimeField('End Date', default = lambda: datetime.now() +
timedelta(days=7))
status = models.CharField('Challenge Status', max_length = 2,
choices=CHALLENGE_STATUS_CHOICES)
summary = models.TextField('Challenge Details')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('challenge-details', args=[self.id,])
|
<commit_before>from datetime import datetime, timedelta
from django.db import models
CHALLENGE_STATUS_CHOICES = (
('NW', 'New'),
('IP', 'In Progress'),
('CP', 'Completed'),
)
class Challenge(models.Model):
title = models.CharField('Challenge Title', max_length = 200)
starts = models.DateTimeField('Start Date', default = datetime.now)
ends = models.DateTimeField('End Date', default = lambda: datetime.now() +
timedelta(days=7))
status = models.CharField('Challenge Status', max_length = 2,
choices=CHALLENGE_STATUS_CHOICES)
summary = models.TextField('Challenge Details')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return "/challenge/%i/" % self.id
<commit_msg>Use reverse() instead of a hardcoded url for Challenge
Instead of hardcoding the url in Challenge.get_absolute_url(), use
reverse().
Signed-off-by: Sebastian Nowicki <72cef2e23c539c1a2a17e6651ec7265f998b0d23@gmail.com><commit_after>
|
from datetime import datetime, timedelta
from django.db import models
from django.core.urlresolvers import reverse
CHALLENGE_STATUS_CHOICES = (
('NW', 'New'),
('IP', 'In Progress'),
('CP', 'Completed'),
)
class Challenge(models.Model):
title = models.CharField('Challenge Title', max_length = 200)
starts = models.DateTimeField('Start Date', default = datetime.now)
ends = models.DateTimeField('End Date', default = lambda: datetime.now() +
timedelta(days=7))
status = models.CharField('Challenge Status', max_length = 2,
choices=CHALLENGE_STATUS_CHOICES)
summary = models.TextField('Challenge Details')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('challenge-details', args=[self.id,])
|
from datetime import datetime, timedelta
from django.db import models
CHALLENGE_STATUS_CHOICES = (
('NW', 'New'),
('IP', 'In Progress'),
('CP', 'Completed'),
)
class Challenge(models.Model):
title = models.CharField('Challenge Title', max_length = 200)
starts = models.DateTimeField('Start Date', default = datetime.now)
ends = models.DateTimeField('End Date', default = lambda: datetime.now() +
timedelta(days=7))
status = models.CharField('Challenge Status', max_length = 2,
choices=CHALLENGE_STATUS_CHOICES)
summary = models.TextField('Challenge Details')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return "/challenge/%i/" % self.id
Use reverse() instead of a hardcoded url for Challenge
Instead of hardcoding the url in Challenge.get_absolute_url(), use
reverse().
Signed-off-by: Sebastian Nowicki <72cef2e23c539c1a2a17e6651ec7265f998b0d23@gmail.com>from datetime import datetime, timedelta
from django.db import models
from django.core.urlresolvers import reverse
CHALLENGE_STATUS_CHOICES = (
('NW', 'New'),
('IP', 'In Progress'),
('CP', 'Completed'),
)
class Challenge(models.Model):
title = models.CharField('Challenge Title', max_length = 200)
starts = models.DateTimeField('Start Date', default = datetime.now)
ends = models.DateTimeField('End Date', default = lambda: datetime.now() +
timedelta(days=7))
status = models.CharField('Challenge Status', max_length = 2,
choices=CHALLENGE_STATUS_CHOICES)
summary = models.TextField('Challenge Details')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('challenge-details', args=[self.id,])
|
<commit_before>from datetime import datetime, timedelta
from django.db import models
CHALLENGE_STATUS_CHOICES = (
('NW', 'New'),
('IP', 'In Progress'),
('CP', 'Completed'),
)
class Challenge(models.Model):
title = models.CharField('Challenge Title', max_length = 200)
starts = models.DateTimeField('Start Date', default = datetime.now)
ends = models.DateTimeField('End Date', default = lambda: datetime.now() +
timedelta(days=7))
status = models.CharField('Challenge Status', max_length = 2,
choices=CHALLENGE_STATUS_CHOICES)
summary = models.TextField('Challenge Details')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return "/challenge/%i/" % self.id
<commit_msg>Use reverse() instead of a hardcoded url for Challenge
Instead of hardcoding the url in Challenge.get_absolute_url(), use
reverse().
Signed-off-by: Sebastian Nowicki <72cef2e23c539c1a2a17e6651ec7265f998b0d23@gmail.com><commit_after>from datetime import datetime, timedelta
from django.db import models
from django.core.urlresolvers import reverse
CHALLENGE_STATUS_CHOICES = (
('NW', 'New'),
('IP', 'In Progress'),
('CP', 'Completed'),
)
class Challenge(models.Model):
title = models.CharField('Challenge Title', max_length = 200)
starts = models.DateTimeField('Start Date', default = datetime.now)
ends = models.DateTimeField('End Date', default = lambda: datetime.now() +
timedelta(days=7))
status = models.CharField('Challenge Status', max_length = 2,
choices=CHALLENGE_STATUS_CHOICES)
summary = models.TextField('Challenge Details')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('challenge-details', args=[self.id,])
|
b149e2089ee819f59e920f8c295c623dce813ab7
|
was/photo/urls.py
|
was/photo/urls.py
|
from django.conf.urls import url
from .views import (upload_photo_artist, delete_photo_artist,
AlbumListView, CreateAlbumView)
urlpatterns = [
url(r'^upload/?$', upload_photo_artist, name='upload_photo_artist'),
url(r'^(?P<photo_id>\d+)/delete/$', delete_photo_artist, name='delete_photo_artist'),
url(r'^album/(?P<user_pk>\d+)/$', AlbumListView.as_view(), name='list_artist_albums'),
url(r'^create-album/$', CreateAlbumView.as_view(), name='create_album')
]
|
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from .views import (upload_photo_artist, delete_photo_artist,
AlbumListView, CreateAlbumView)
# Photo-app routes.  Only album creation is wrapped in login_required
# here -- NOTE(review): upload/delete also look state-mutating; verify
# those views enforce authentication in their own bodies.
urlpatterns = [
    url(r'^upload/?$', upload_photo_artist, name='upload_photo_artist'),
    url(r'^(?P<photo_id>\d+)/delete/$', delete_photo_artist, name='delete_photo_artist'),
    url(r'^album/(?P<user_pk>\d+)/$', AlbumListView.as_view(), name='list_artist_albums'),
    url(r'^create-album/$', login_required(CreateAlbumView.as_view()), name='create_album')
]
|
Add login requirement on create album view
|
Add login requirement on create album view
|
Python
|
mit
|
KeserOner/where-artists-share,KeserOner/where-artists-share
|
from django.conf.urls import url
from .views import (upload_photo_artist, delete_photo_artist,
AlbumListView, CreateAlbumView)
urlpatterns = [
url(r'^upload/?$', upload_photo_artist, name='upload_photo_artist'),
url(r'^(?P<photo_id>\d+)/delete/$', delete_photo_artist, name='delete_photo_artist'),
url(r'^album/(?P<user_pk>\d+)/$', AlbumListView.as_view(), name='list_artist_albums'),
url(r'^create-album/$', CreateAlbumView.as_view(), name='create_album')
]
Add login requirement on create album view
|
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from .views import (upload_photo_artist, delete_photo_artist,
AlbumListView, CreateAlbumView)
urlpatterns = [
url(r'^upload/?$', upload_photo_artist, name='upload_photo_artist'),
url(r'^(?P<photo_id>\d+)/delete/$', delete_photo_artist, name='delete_photo_artist'),
url(r'^album/(?P<user_pk>\d+)/$', AlbumListView.as_view(), name='list_artist_albums'),
url(r'^create-album/$', login_required(CreateAlbumView.as_view()), name='create_album')
]
|
<commit_before>from django.conf.urls import url
from .views import (upload_photo_artist, delete_photo_artist,
AlbumListView, CreateAlbumView)
urlpatterns = [
url(r'^upload/?$', upload_photo_artist, name='upload_photo_artist'),
url(r'^(?P<photo_id>\d+)/delete/$', delete_photo_artist, name='delete_photo_artist'),
url(r'^album/(?P<user_pk>\d+)/$', AlbumListView.as_view(), name='list_artist_albums'),
url(r'^create-album/$', CreateAlbumView.as_view(), name='create_album')
]
<commit_msg>Add login requirement on create album view<commit_after>
|
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from .views import (upload_photo_artist, delete_photo_artist,
AlbumListView, CreateAlbumView)
urlpatterns = [
url(r'^upload/?$', upload_photo_artist, name='upload_photo_artist'),
url(r'^(?P<photo_id>\d+)/delete/$', delete_photo_artist, name='delete_photo_artist'),
url(r'^album/(?P<user_pk>\d+)/$', AlbumListView.as_view(), name='list_artist_albums'),
url(r'^create-album/$', login_required(CreateAlbumView.as_view()), name='create_album')
]
|
from django.conf.urls import url
from .views import (upload_photo_artist, delete_photo_artist,
AlbumListView, CreateAlbumView)
urlpatterns = [
url(r'^upload/?$', upload_photo_artist, name='upload_photo_artist'),
url(r'^(?P<photo_id>\d+)/delete/$', delete_photo_artist, name='delete_photo_artist'),
url(r'^album/(?P<user_pk>\d+)/$', AlbumListView.as_view(), name='list_artist_albums'),
url(r'^create-album/$', CreateAlbumView.as_view(), name='create_album')
]
Add login requirement on create album viewfrom django.conf.urls import url
from django.contrib.auth.decorators import login_required
from .views import (upload_photo_artist, delete_photo_artist,
AlbumListView, CreateAlbumView)
urlpatterns = [
url(r'^upload/?$', upload_photo_artist, name='upload_photo_artist'),
url(r'^(?P<photo_id>\d+)/delete/$', delete_photo_artist, name='delete_photo_artist'),
url(r'^album/(?P<user_pk>\d+)/$', AlbumListView.as_view(), name='list_artist_albums'),
url(r'^create-album/$', login_required(CreateAlbumView.as_view()), name='create_album')
]
|
<commit_before>from django.conf.urls import url
from .views import (upload_photo_artist, delete_photo_artist,
AlbumListView, CreateAlbumView)
urlpatterns = [
url(r'^upload/?$', upload_photo_artist, name='upload_photo_artist'),
url(r'^(?P<photo_id>\d+)/delete/$', delete_photo_artist, name='delete_photo_artist'),
url(r'^album/(?P<user_pk>\d+)/$', AlbumListView.as_view(), name='list_artist_albums'),
url(r'^create-album/$', CreateAlbumView.as_view(), name='create_album')
]
<commit_msg>Add login requirement on create album view<commit_after>from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from .views import (upload_photo_artist, delete_photo_artist,
AlbumListView, CreateAlbumView)
urlpatterns = [
url(r'^upload/?$', upload_photo_artist, name='upload_photo_artist'),
url(r'^(?P<photo_id>\d+)/delete/$', delete_photo_artist, name='delete_photo_artist'),
url(r'^album/(?P<user_pk>\d+)/$', AlbumListView.as_view(), name='list_artist_albums'),
url(r'^create-album/$', login_required(CreateAlbumView.as_view()), name='create_album')
]
|
f3875956cda23c4b0086dbc083161dc6f2c1a771
|
spicedham/split_tokenizer.py
|
spicedham/split_tokenizer.py
|
from re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
"""
Split the text on punctuation and newlines, lowercase everything, and
filter the empty strings
"""
def tokenize(self, text):
text = split('[ ,.?!\n\r]', text)
is_not_blank = lambda x: x != ''
text = filter(is_not_blank, text)
lower_case = lambda x: x.lower()
text = map(lower_case, text)
return text
|
from re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
    """Tokenizer that splits on spaces, simple punctuation and newlines.

    Every token is lowercased and empty strings produced by adjacent
    delimiters are dropped.
    """
    def tokenize(self, text):
        pieces = split('[ ,.?!\n\r]', text)
        return [piece.lower() for piece in pieces if piece]
|
Make mapping & filtering into a list comprehension
|
Make mapping & filtering into a list comprehension
|
Python
|
mpl-2.0
|
mozilla/spicedham,mozilla/spicedham
|
from re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
"""
Split the text on punctuation and newlines, lowercase everything, and
filter the empty strings
"""
def tokenize(self, text):
text = split('[ ,.?!\n\r]', text)
is_not_blank = lambda x: x != ''
text = filter(is_not_blank, text)
lower_case = lambda x: x.lower()
text = map(lower_case, text)
return text
Make mapping & filtering into a list comprehension
|
from re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
"""
Split the text on punctuation and newlines, lowercase everything, and
filter the empty strings
"""
def tokenize(self, text):
text = split('[ ,.?!\n\r]', text)
text = [token.lower() for token in text if token]
return text
|
<commit_before>from re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
"""
Split the text on punctuation and newlines, lowercase everything, and
filter the empty strings
"""
def tokenize(self, text):
text = split('[ ,.?!\n\r]', text)
is_not_blank = lambda x: x != ''
text = filter(is_not_blank, text)
lower_case = lambda x: x.lower()
text = map(lower_case, text)
return text
<commit_msg>Make mapping & filtering into a list comprehension<commit_after>
|
from re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
"""
Split the text on punctuation and newlines, lowercase everything, and
filter the empty strings
"""
def tokenize(self, text):
text = split('[ ,.?!\n\r]', text)
text = [token.lower() for token in text if token]
return text
|
from re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
"""
Split the text on punctuation and newlines, lowercase everything, and
filter the empty strings
"""
def tokenize(self, text):
text = split('[ ,.?!\n\r]', text)
is_not_blank = lambda x: x != ''
text = filter(is_not_blank, text)
lower_case = lambda x: x.lower()
text = map(lower_case, text)
return text
Make mapping & filtering into a list comprehensionfrom re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
"""
Split the text on punctuation and newlines, lowercase everything, and
filter the empty strings
"""
def tokenize(self, text):
text = split('[ ,.?!\n\r]', text)
text = [token.lower() for token in text if token]
return text
|
<commit_before>from re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
"""
Split the text on punctuation and newlines, lowercase everything, and
filter the empty strings
"""
def tokenize(self, text):
text = split('[ ,.?!\n\r]', text)
is_not_blank = lambda x: x != ''
text = filter(is_not_blank, text)
lower_case = lambda x: x.lower()
text = map(lower_case, text)
return text
<commit_msg>Make mapping & filtering into a list comprehension<commit_after>from re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
"""
Split the text on punctuation and newlines, lowercase everything, and
filter the empty strings
"""
def tokenize(self, text):
text = split('[ ,.?!\n\r]', text)
text = [token.lower() for token in text if token]
return text
|
1f90a3d733de99cc9c412cdd559ed3ad26519acc
|
autoencoder/api.py
|
autoencoder/api.py
|
from .io import preprocess
from .train import train
from .encode import encode
def autoencode(count_matrix, kfold=None,
censor_matrix=None, type='normal',
learning_rate=1e-2,
hidden_size=10,
epochs=10):
x = preprocess(count_matrix, kfold=kfold, censor=censor_matrix)
model = train(x, hidden_size=hidden_size, learning_rate=learning_rate,
aetype=type, epochs=epochs)
encoded = encode(count_matrix, model)
return encoded
|
from .io import preprocess
from .train import train
from .encode import encode
def autoencode(count_matrix, kfold=None, reduced=False,
               censor_matrix=None, type='normal',
               learning_rate=1e-2,
               hidden_size=10,
               epochs=10):
    """Preprocess a count matrix, train an autoencoder on it, and encode it.

    ``reduced`` is forwarded to encode(); presumably it selects the
    reduced (hidden-layer) representation instead of the reconstruction
    -- confirm against encode()'s implementation.  ``type`` shadows the
    builtin and becomes train()'s ``aetype``.
    """
    x = preprocess(count_matrix, kfold=kfold, censor=censor_matrix)
    model = train(x, hidden_size=hidden_size, learning_rate=learning_rate,
                  aetype=type, epochs=epochs)
    # Note: encoding is done on the raw count_matrix, not the
    # preprocessed ``x``.
    encoded = encode(count_matrix, model, reduced=reduced)
    return encoded
|
Add reduce option to API
|
Add reduce option to API
|
Python
|
apache-2.0
|
theislab/dca,theislab/dca,theislab/dca
|
from .io import preprocess
from .train import train
from .encode import encode
def autoencode(count_matrix, kfold=None,
censor_matrix=None, type='normal',
learning_rate=1e-2,
hidden_size=10,
epochs=10):
x = preprocess(count_matrix, kfold=kfold, censor=censor_matrix)
model = train(x, hidden_size=hidden_size, learning_rate=learning_rate,
aetype=type, epochs=epochs)
encoded = encode(count_matrix, model)
return encoded
Add reduce option to API
|
from .io import preprocess
from .train import train
from .encode import encode
def autoencode(count_matrix, kfold=None, reduced=False,
censor_matrix=None, type='normal',
learning_rate=1e-2,
hidden_size=10,
epochs=10):
x = preprocess(count_matrix, kfold=kfold, censor=censor_matrix)
model = train(x, hidden_size=hidden_size, learning_rate=learning_rate,
aetype=type, epochs=epochs)
encoded = encode(count_matrix, model, reduced=reduced)
return encoded
|
<commit_before>from .io import preprocess
from .train import train
from .encode import encode
def autoencode(count_matrix, kfold=None,
censor_matrix=None, type='normal',
learning_rate=1e-2,
hidden_size=10,
epochs=10):
x = preprocess(count_matrix, kfold=kfold, censor=censor_matrix)
model = train(x, hidden_size=hidden_size, learning_rate=learning_rate,
aetype=type, epochs=epochs)
encoded = encode(count_matrix, model)
return encoded
<commit_msg>Add reduce option to API<commit_after>
|
from .io import preprocess
from .train import train
from .encode import encode
def autoencode(count_matrix, kfold=None, reduced=False,
censor_matrix=None, type='normal',
learning_rate=1e-2,
hidden_size=10,
epochs=10):
x = preprocess(count_matrix, kfold=kfold, censor=censor_matrix)
model = train(x, hidden_size=hidden_size, learning_rate=learning_rate,
aetype=type, epochs=epochs)
encoded = encode(count_matrix, model, reduced=reduced)
return encoded
|
from .io import preprocess
from .train import train
from .encode import encode
def autoencode(count_matrix, kfold=None,
censor_matrix=None, type='normal',
learning_rate=1e-2,
hidden_size=10,
epochs=10):
x = preprocess(count_matrix, kfold=kfold, censor=censor_matrix)
model = train(x, hidden_size=hidden_size, learning_rate=learning_rate,
aetype=type, epochs=epochs)
encoded = encode(count_matrix, model)
return encoded
Add reduce option to APIfrom .io import preprocess
from .train import train
from .encode import encode
def autoencode(count_matrix, kfold=None, reduced=False,
censor_matrix=None, type='normal',
learning_rate=1e-2,
hidden_size=10,
epochs=10):
x = preprocess(count_matrix, kfold=kfold, censor=censor_matrix)
model = train(x, hidden_size=hidden_size, learning_rate=learning_rate,
aetype=type, epochs=epochs)
encoded = encode(count_matrix, model, reduced=reduced)
return encoded
|
<commit_before>from .io import preprocess
from .train import train
from .encode import encode
def autoencode(count_matrix, kfold=None,
censor_matrix=None, type='normal',
learning_rate=1e-2,
hidden_size=10,
epochs=10):
x = preprocess(count_matrix, kfold=kfold, censor=censor_matrix)
model = train(x, hidden_size=hidden_size, learning_rate=learning_rate,
aetype=type, epochs=epochs)
encoded = encode(count_matrix, model)
return encoded
<commit_msg>Add reduce option to API<commit_after>from .io import preprocess
from .train import train
from .encode import encode
def autoencode(count_matrix, kfold=None, reduced=False,
censor_matrix=None, type='normal',
learning_rate=1e-2,
hidden_size=10,
epochs=10):
x = preprocess(count_matrix, kfold=kfold, censor=censor_matrix)
model = train(x, hidden_size=hidden_size, learning_rate=learning_rate,
aetype=type, epochs=epochs)
encoded = encode(count_matrix, model, reduced=reduced)
return encoded
|
6bba72aca76a8c40b615bfbde7e7a74d705f1012
|
avalon/__init__.py
|
avalon/__init__.py
|
# -*- coding: utf-8 -*-
#
# Avalon Music Server
#
# Copyright 2012-2014 TSH Labs <projects@tshlabs.org>
#
# Available under the MIT license. See LICENSE for details.
#
"""Supporting library for the Avalon Music Server."""
from __future__ import unicode_literals
__all__ = [
'__version__',
'DEFAULT_ENCODING'
]
__version__ = '0.3.0'
# Character sets are hard, let's go shopping!
DEFAULT_ENCODING = 'utf-8'
|
# -*- coding: utf-8 -*-
#
# Avalon Music Server
#
# Copyright 2012-2014 TSH Labs <projects@tshlabs.org>
#
# Available under the MIT license. See LICENSE for details.
#
"""Supporting library for the Avalon Music Server."""
from __future__ import unicode_literals
__all__ = [
'__version__',
'DEFAULT_ENCODING'
]
__version__ = '0.3.0-DEV'
# Character sets are hard, let's go shopping!
DEFAULT_ENCODING = 'utf-8'
|
Add -DEV suffix while version has not been released
|
Add -DEV suffix while version has not been released
|
Python
|
mit
|
tshlabs/avalonms
|
# -*- coding: utf-8 -*-
#
# Avalon Music Server
#
# Copyright 2012-2014 TSH Labs <projects@tshlabs.org>
#
# Available under the MIT license. See LICENSE for details.
#
"""Supporting library for the Avalon Music Server."""
from __future__ import unicode_literals
__all__ = [
'__version__',
'DEFAULT_ENCODING'
]
__version__ = '0.3.0'
# Character sets are hard, let's go shopping!
DEFAULT_ENCODING = 'utf-8'
Add -DEV suffix while version has not been released
|
# -*- coding: utf-8 -*-
#
# Avalon Music Server
#
# Copyright 2012-2014 TSH Labs <projects@tshlabs.org>
#
# Available under the MIT license. See LICENSE for details.
#
"""Supporting library for the Avalon Music Server."""
from __future__ import unicode_literals
__all__ = [
'__version__',
'DEFAULT_ENCODING'
]
__version__ = '0.3.0-DEV'
# Character sets are hard, let's go shopping!
DEFAULT_ENCODING = 'utf-8'
|
<commit_before># -*- coding: utf-8 -*-
#
# Avalon Music Server
#
# Copyright 2012-2014 TSH Labs <projects@tshlabs.org>
#
# Available under the MIT license. See LICENSE for details.
#
"""Supporting library for the Avalon Music Server."""
from __future__ import unicode_literals
__all__ = [
'__version__',
'DEFAULT_ENCODING'
]
__version__ = '0.3.0'
# Character sets are hard, let's go shopping!
DEFAULT_ENCODING = 'utf-8'
<commit_msg>Add -DEV suffix while version has not been released<commit_after>
|
# -*- coding: utf-8 -*-
#
# Avalon Music Server
#
# Copyright 2012-2014 TSH Labs <projects@tshlabs.org>
#
# Available under the MIT license. See LICENSE for details.
#
"""Supporting library for the Avalon Music Server."""
from __future__ import unicode_literals
__all__ = [
'__version__',
'DEFAULT_ENCODING'
]
__version__ = '0.3.0-DEV'
# Character sets are hard, let's go shopping!
DEFAULT_ENCODING = 'utf-8'
|
# -*- coding: utf-8 -*-
#
# Avalon Music Server
#
# Copyright 2012-2014 TSH Labs <projects@tshlabs.org>
#
# Available under the MIT license. See LICENSE for details.
#
"""Supporting library for the Avalon Music Server."""
from __future__ import unicode_literals
__all__ = [
'__version__',
'DEFAULT_ENCODING'
]
__version__ = '0.3.0'
# Character sets are hard, let's go shopping!
DEFAULT_ENCODING = 'utf-8'
Add -DEV suffix while version has not been released# -*- coding: utf-8 -*-
#
# Avalon Music Server
#
# Copyright 2012-2014 TSH Labs <projects@tshlabs.org>
#
# Available under the MIT license. See LICENSE for details.
#
"""Supporting library for the Avalon Music Server."""
from __future__ import unicode_literals
__all__ = [
'__version__',
'DEFAULT_ENCODING'
]
__version__ = '0.3.0-DEV'
# Character sets are hard, let's go shopping!
DEFAULT_ENCODING = 'utf-8'
|
<commit_before># -*- coding: utf-8 -*-
#
# Avalon Music Server
#
# Copyright 2012-2014 TSH Labs <projects@tshlabs.org>
#
# Available under the MIT license. See LICENSE for details.
#
"""Supporting library for the Avalon Music Server."""
from __future__ import unicode_literals
__all__ = [
'__version__',
'DEFAULT_ENCODING'
]
__version__ = '0.3.0'
# Character sets are hard, let's go shopping!
DEFAULT_ENCODING = 'utf-8'
<commit_msg>Add -DEV suffix while version has not been released<commit_after># -*- coding: utf-8 -*-
#
# Avalon Music Server
#
# Copyright 2012-2014 TSH Labs <projects@tshlabs.org>
#
# Available under the MIT license. See LICENSE for details.
#
"""Supporting library for the Avalon Music Server."""
from __future__ import unicode_literals
__all__ = [
'__version__',
'DEFAULT_ENCODING'
]
__version__ = '0.3.0-DEV'
# Character sets are hard, let's go shopping!
DEFAULT_ENCODING = 'utf-8'
|
679f20fc8747020f08f1e18a47772b18d886d29f
|
circuit/_twisted.py
|
circuit/_twisted.py
|
# Copyright 2012 Edgeware AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
print "EXIT"
if exc_type is defer._DefGen_Return:
print "GOT IT"
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
|
# Copyright 2012 Edgeware AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
if exc_type is defer._DefGen_Return:
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
|
Remove print statements from TwistedCircuitBreaker
|
Remove print statements from TwistedCircuitBreaker
|
Python
|
apache-2.0
|
edgeware/python-circuit
|
# Copyright 2012 Edgeware AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
print "EXIT"
if exc_type is defer._DefGen_Return:
print "GOT IT"
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
Remove print statements from TwistedCircuitBreaker
|
# Copyright 2012 Edgeware AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
if exc_type is defer._DefGen_Return:
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
|
<commit_before># Copyright 2012 Edgeware AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
print "EXIT"
if exc_type is defer._DefGen_Return:
print "GOT IT"
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
<commit_msg>Remove print statements from TwistedCircuitBreaker<commit_after>
|
# Copyright 2012 Edgeware AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
if exc_type is defer._DefGen_Return:
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
|
# Copyright 2012 Edgeware AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
print "EXIT"
if exc_type is defer._DefGen_Return:
print "GOT IT"
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
Remove print statements from TwistedCircuitBreaker# Copyright 2012 Edgeware AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
if exc_type is defer._DefGen_Return:
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
|
<commit_before># Copyright 2012 Edgeware AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
print "EXIT"
if exc_type is defer._DefGen_Return:
print "GOT IT"
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
<commit_msg>Remove print statements from TwistedCircuitBreaker<commit_after># Copyright 2012 Edgeware AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from circuit.breaker import CircuitBreaker, CircuitBreakerSet
try:
from twisted.internet import defer
except ImportError:
pass
class TwistedCircuitBreaker(CircuitBreaker):
"""Circuit breaker that know that L{defer.inlineCallbacks} use
exceptions in its internal workings.
"""
def __exit__(self, exc_type, exc_val, tb):
if exc_type is defer._DefGen_Return:
exc_type, exc_val, tb = None, None, None
return CircuitBreaker.__exit__(self, exc_type, exc_val, tb)
class TwistedCircuitBreakerSet(CircuitBreakerSet):
"""Circuit breaker that supports twisted."""
def __init__(self, reactor, logger, **kwargs):
kwargs.update({'factory': TwistedCircuitBreaker})
CircuitBreakerSet.__init__(self, reactor.seconds, logger,
**kwargs)
|
8e6eb543fb011ff85fc548f41ec8def183b1b8df
|
class4/exercise4.py
|
class4/exercise4.py
|
#!/usr/bin/env python
import sys
from getpass import getpass
import time
import pexpect
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass()
# ssh -l pyclass 50.76.53.27 -p 8022
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
#Useful for debugging the session
#ssh_conn.logfile = sys.stdout
ssh_conn.timeout = 3
ssh_conn.expect('ssword:')
time.sleep(1)
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline('terminal lenght 0')
ssh_conn.expect('#')
ssh_conn.sendline('configure terminal')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('logging buffered 20000')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('exit')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('show running-config')
time.sleep(2)
ssh_conn.expect('no logging console')
# Print the output of the show ip int brief command.
print ssh_conn.before
if __name__ == "__main__":
main()
|
# Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.
#!/usr/bin/env python
import sys
from getpass import getpass
import time
import pexpect
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass()
# ssh -l pyclass 50.76.53.27 -p 8022
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
#Useful for debugging the session
#ssh_conn.logfile = sys.stdout
ssh_conn.timeout = 3
ssh_conn.expect('ssword:')
time.sleep(1)
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline('terminal lenght 0')
ssh_conn.expect('#')
ssh_conn.sendline('configure terminal')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('logging buffered 20000')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('exit')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('show running-config')
time.sleep(2)
ssh_conn.expect('no logging console')
# Print the output of the show ip int brief command.
print ssh_conn.before
if __name__ == "__main__":
main()
|
Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.
|
Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.
|
Python
|
apache-2.0
|
linkdebian/pynet_course
|
#!/usr/bin/env python
import sys
from getpass import getpass
import time
import pexpect
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass()
# ssh -l pyclass 50.76.53.27 -p 8022
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
#Useful for debugging the session
#ssh_conn.logfile = sys.stdout
ssh_conn.timeout = 3
ssh_conn.expect('ssword:')
time.sleep(1)
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline('terminal lenght 0')
ssh_conn.expect('#')
ssh_conn.sendline('configure terminal')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('logging buffered 20000')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('exit')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('show running-config')
time.sleep(2)
ssh_conn.expect('no logging console')
# Print the output of the show ip int brief command.
print ssh_conn.before
if __name__ == "__main__":
main()Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.
|
# Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.
#!/usr/bin/env python
import sys
from getpass import getpass
import time
import pexpect
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass()
# ssh -l pyclass 50.76.53.27 -p 8022
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
#Useful for debugging the session
#ssh_conn.logfile = sys.stdout
ssh_conn.timeout = 3
ssh_conn.expect('ssword:')
time.sleep(1)
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline('terminal lenght 0')
ssh_conn.expect('#')
ssh_conn.sendline('configure terminal')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('logging buffered 20000')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('exit')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('show running-config')
time.sleep(2)
ssh_conn.expect('no logging console')
# Print the output of the show ip int brief command.
print ssh_conn.before
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
import sys
from getpass import getpass
import time
import pexpect
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass()
# ssh -l pyclass 50.76.53.27 -p 8022
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
#Useful for debugging the session
#ssh_conn.logfile = sys.stdout
ssh_conn.timeout = 3
ssh_conn.expect('ssword:')
time.sleep(1)
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline('terminal lenght 0')
ssh_conn.expect('#')
ssh_conn.sendline('configure terminal')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('logging buffered 20000')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('exit')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('show running-config')
time.sleep(2)
ssh_conn.expect('no logging console')
# Print the output of the show ip int brief command.
print ssh_conn.before
if __name__ == "__main__":
main()<commit_msg>Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.<commit_after>
|
# Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.
#!/usr/bin/env python
import sys
from getpass import getpass
import time
import pexpect
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass()
# ssh -l pyclass 50.76.53.27 -p 8022
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
#Useful for debugging the session
#ssh_conn.logfile = sys.stdout
ssh_conn.timeout = 3
ssh_conn.expect('ssword:')
time.sleep(1)
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline('terminal lenght 0')
ssh_conn.expect('#')
ssh_conn.sendline('configure terminal')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('logging buffered 20000')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('exit')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('show running-config')
time.sleep(2)
ssh_conn.expect('no logging console')
# Print the output of the show ip int brief command.
print ssh_conn.before
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
import sys
from getpass import getpass
import time
import pexpect
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass()
# ssh -l pyclass 50.76.53.27 -p 8022
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
#Useful for debugging the session
#ssh_conn.logfile = sys.stdout
ssh_conn.timeout = 3
ssh_conn.expect('ssword:')
time.sleep(1)
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline('terminal lenght 0')
ssh_conn.expect('#')
ssh_conn.sendline('configure terminal')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('logging buffered 20000')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('exit')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('show running-config')
time.sleep(2)
ssh_conn.expect('no logging console')
# Print the output of the show ip int brief command.
print ssh_conn.before
if __name__ == "__main__":
main()Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.# Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.
#!/usr/bin/env python
import sys
from getpass import getpass
import time
import pexpect
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass()
# ssh -l pyclass 50.76.53.27 -p 8022
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
#Useful for debugging the session
#ssh_conn.logfile = sys.stdout
ssh_conn.timeout = 3
ssh_conn.expect('ssword:')
time.sleep(1)
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline('terminal lenght 0')
ssh_conn.expect('#')
ssh_conn.sendline('configure terminal')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('logging buffered 20000')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('exit')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('show running-config')
time.sleep(2)
ssh_conn.expect('no logging console')
# Print the output of the show ip int brief command.
print ssh_conn.before
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
import sys
from getpass import getpass
import time
import pexpect
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass()
# ssh -l pyclass 50.76.53.27 -p 8022
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
#Useful for debugging the session
#ssh_conn.logfile = sys.stdout
ssh_conn.timeout = 3
ssh_conn.expect('ssword:')
time.sleep(1)
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline('terminal lenght 0')
ssh_conn.expect('#')
ssh_conn.sendline('configure terminal')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('logging buffered 20000')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('exit')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('show running-config')
time.sleep(2)
ssh_conn.expect('no logging console')
# Print the output of the show ip int brief command.
print ssh_conn.before
if __name__ == "__main__":
main()<commit_msg>Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.<commit_after># Use PExpect to change the logging buffer size (logging buffered <size>) on pynet-rtr2. Verify this change by examining the output of 'show run'.
#!/usr/bin/env python
import sys
from getpass import getpass
import time
import pexpect
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
port = 8022
password = getpass()
# ssh -l pyclass 50.76.53.27 -p 8022
ssh_conn = pexpect.spawn('ssh -l {} {} -p {}'.format(username, ip_addr, port))
#Useful for debugging the session
#ssh_conn.logfile = sys.stdout
ssh_conn.timeout = 3
ssh_conn.expect('ssword:')
time.sleep(1)
ssh_conn.sendline(password)
ssh_conn.expect('#')
ssh_conn.sendline('terminal lenght 0')
ssh_conn.expect('#')
ssh_conn.sendline('configure terminal')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('logging buffered 20000')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('exit')
time.sleep(1)
ssh_conn.expect('#')
ssh_conn.sendline('show running-config')
time.sleep(2)
ssh_conn.expect('no logging console')
# Print the output of the show ip int brief command.
print ssh_conn.before
if __name__ == "__main__":
main()
|
2bafbe1539b0c94a3fcad806e36af1f152fb71fe
|
tests/naming_tests.py
|
tests/naming_tests.py
|
from nose.tools import istest, assert_equal
from whack.naming import PackageNamer
@istest
def package_with_unnamed_source_has_name_equal_to_install_identifier():
package_source = PackageSource("/tmp/nginx-src", "nginx")
package_name = _name_package(package_source, {})
assert_equal("install-id(/tmp/nginx-src, {})", package_name)
def _name_package(package_source, params):
package_namer = PackageNamer(_generate_install_id)
return package_namer.name_package(package_source, {})
def _generate_install_id(source_dir_path, params):
return "install-id({0}, {1})".format(source_dir_path, params)
class PackageSource(object):
def __init__(self, path, name):
self.path = path
self._name = name
def name(self):
return self._name
|
from nose.tools import istest, assert_equal
from whack.naming import PackageNamer
@istest
def package_with_unnamed_source_has_name_equal_to_install_identifier():
package_source = PackageSource("/tmp/nginx-src", None)
package_name = _name_package(package_source, {})
assert_equal("install-id(/tmp/nginx-src, {})", package_name)
def _name_package(package_source, params):
package_namer = PackageNamer(_generate_install_id)
return package_namer.name_package(package_source, {})
def _generate_install_id(source_dir_path, params):
return "install-id({0}, {1})".format(source_dir_path, params)
class PackageSource(object):
def __init__(self, path, name):
self.path = path
self._name = name
def name(self):
return self._name
|
Fix test by actually setting package source to have no name
|
Fix test by actually setting package source to have no name
|
Python
|
bsd-2-clause
|
mwilliamson/whack
|
from nose.tools import istest, assert_equal
from whack.naming import PackageNamer
@istest
def package_with_unnamed_source_has_name_equal_to_install_identifier():
package_source = PackageSource("/tmp/nginx-src", "nginx")
package_name = _name_package(package_source, {})
assert_equal("install-id(/tmp/nginx-src, {})", package_name)
def _name_package(package_source, params):
package_namer = PackageNamer(_generate_install_id)
return package_namer.name_package(package_source, {})
def _generate_install_id(source_dir_path, params):
return "install-id({0}, {1})".format(source_dir_path, params)
class PackageSource(object):
def __init__(self, path, name):
self.path = path
self._name = name
def name(self):
return self._name
Fix test by actually setting package source to have no name
|
from nose.tools import istest, assert_equal
from whack.naming import PackageNamer
@istest
def package_with_unnamed_source_has_name_equal_to_install_identifier():
package_source = PackageSource("/tmp/nginx-src", None)
package_name = _name_package(package_source, {})
assert_equal("install-id(/tmp/nginx-src, {})", package_name)
def _name_package(package_source, params):
package_namer = PackageNamer(_generate_install_id)
return package_namer.name_package(package_source, {})
def _generate_install_id(source_dir_path, params):
return "install-id({0}, {1})".format(source_dir_path, params)
class PackageSource(object):
def __init__(self, path, name):
self.path = path
self._name = name
def name(self):
return self._name
|
<commit_before>from nose.tools import istest, assert_equal
from whack.naming import PackageNamer
@istest
def package_with_unnamed_source_has_name_equal_to_install_identifier():
package_source = PackageSource("/tmp/nginx-src", "nginx")
package_name = _name_package(package_source, {})
assert_equal("install-id(/tmp/nginx-src, {})", package_name)
def _name_package(package_source, params):
package_namer = PackageNamer(_generate_install_id)
return package_namer.name_package(package_source, {})
def _generate_install_id(source_dir_path, params):
return "install-id({0}, {1})".format(source_dir_path, params)
class PackageSource(object):
def __init__(self, path, name):
self.path = path
self._name = name
def name(self):
return self._name
<commit_msg>Fix test by actually setting package source to have no name<commit_after>
|
from nose.tools import istest, assert_equal
from whack.naming import PackageNamer
@istest
def package_with_unnamed_source_has_name_equal_to_install_identifier():
package_source = PackageSource("/tmp/nginx-src", None)
package_name = _name_package(package_source, {})
assert_equal("install-id(/tmp/nginx-src, {})", package_name)
def _name_package(package_source, params):
package_namer = PackageNamer(_generate_install_id)
return package_namer.name_package(package_source, {})
def _generate_install_id(source_dir_path, params):
return "install-id({0}, {1})".format(source_dir_path, params)
class PackageSource(object):
def __init__(self, path, name):
self.path = path
self._name = name
def name(self):
return self._name
|
from nose.tools import istest, assert_equal
from whack.naming import PackageNamer
@istest
def package_with_unnamed_source_has_name_equal_to_install_identifier():
package_source = PackageSource("/tmp/nginx-src", "nginx")
package_name = _name_package(package_source, {})
assert_equal("install-id(/tmp/nginx-src, {})", package_name)
def _name_package(package_source, params):
package_namer = PackageNamer(_generate_install_id)
return package_namer.name_package(package_source, {})
def _generate_install_id(source_dir_path, params):
return "install-id({0}, {1})".format(source_dir_path, params)
class PackageSource(object):
def __init__(self, path, name):
self.path = path
self._name = name
def name(self):
return self._name
Fix test by actually setting package source to have no namefrom nose.tools import istest, assert_equal
from whack.naming import PackageNamer
@istest
def package_with_unnamed_source_has_name_equal_to_install_identifier():
package_source = PackageSource("/tmp/nginx-src", None)
package_name = _name_package(package_source, {})
assert_equal("install-id(/tmp/nginx-src, {})", package_name)
def _name_package(package_source, params):
package_namer = PackageNamer(_generate_install_id)
return package_namer.name_package(package_source, {})
def _generate_install_id(source_dir_path, params):
return "install-id({0}, {1})".format(source_dir_path, params)
class PackageSource(object):
def __init__(self, path, name):
self.path = path
self._name = name
def name(self):
return self._name
|
<commit_before>from nose.tools import istest, assert_equal
from whack.naming import PackageNamer
@istest
def package_with_unnamed_source_has_name_equal_to_install_identifier():
package_source = PackageSource("/tmp/nginx-src", "nginx")
package_name = _name_package(package_source, {})
assert_equal("install-id(/tmp/nginx-src, {})", package_name)
def _name_package(package_source, params):
package_namer = PackageNamer(_generate_install_id)
return package_namer.name_package(package_source, {})
def _generate_install_id(source_dir_path, params):
return "install-id({0}, {1})".format(source_dir_path, params)
class PackageSource(object):
def __init__(self, path, name):
self.path = path
self._name = name
def name(self):
return self._name
<commit_msg>Fix test by actually setting package source to have no name<commit_after>from nose.tools import istest, assert_equal
from whack.naming import PackageNamer
@istest
def package_with_unnamed_source_has_name_equal_to_install_identifier():
package_source = PackageSource("/tmp/nginx-src", None)
package_name = _name_package(package_source, {})
assert_equal("install-id(/tmp/nginx-src, {})", package_name)
def _name_package(package_source, params):
package_namer = PackageNamer(_generate_install_id)
return package_namer.name_package(package_source, {})
def _generate_install_id(source_dir_path, params):
return "install-id({0}, {1})".format(source_dir_path, params)
class PackageSource(object):
def __init__(self, path, name):
self.path = path
self._name = name
def name(self):
return self._name
|
ec72bfd00fdb81415efac782d224b17e534849c4
|
mfh.py
|
mfh.py
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
if args.updater:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
main()
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
if args.client is not None:
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive() or server_process.is_alive():
time.sleep(5)
else:
if args.updater:
# update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
main()
|
Add condition to only launch server if -s or --server is specified
|
Add condition to only launch server if -s or --server is specified
Now you can launch client, server or updater on its own, launch
nothing, or launch the whole thing altogether!
|
Python
|
mit
|
Zloool/manyfaced-honeypot
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
if args.updater:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
main()
Add condition to only launch server if -s or --server is specified
Now you can launch client, server or updater on its own, launch
nothing, or launch the whole thing altogether!
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
if args.client is not None:
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive() or server_process.is_alive():
time.sleep(5)
else:
if args.updater:
# update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
main()
|
<commit_before>import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
if args.updater:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
main()
<commit_msg>Add condition to only launch server if -s or --server is specified
Now you can launch client, server or updater on its own, launch
nothing, or launch the whole thing altogether!<commit_after>
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
if args.client is not None:
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive() or server_process.is_alive():
time.sleep(5)
else:
if args.updater:
# update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
main()
|
import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
if args.updater:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
main()
Add condition to only launch server if -s or --server is specified
Now you can launch client, server or updater on its own, launch
nothing, or launch the whole thing altogether!import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
if args.client is not None:
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive() or server_process.is_alive():
time.sleep(5)
else:
if args.updater:
# update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
main()
|
<commit_before>import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive():
time.sleep(5)
else:
if args.updater:
update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
main()
<commit_msg>Add condition to only launch server if -s or --server is specified
Now you can launch client, server or updater on its own, launch
nothing, or launch the whole thing altogether!<commit_after>import os
import sys
import time
from multiprocessing import Process, Event
import mfhclient
import server
import update
from arguments import parse
from settings import HONEYPORT, HIVEPORT
def main():
update_event = Event()
mfhclient_process = Process(
args=(args, update_event,),
name="mfhclient_process",
target=mfhclient.main,
)
server_process = Process(
args=(args, update_event,),
name="server_process",
target=server.main,
)
if args.client is not None:
mfhclient_process.start()
if args.client is not None:
server_process.start()
if args.updater:
trigger_process = Process(
args=(update_event,),
name="trigger_process",
target=update.trigger,
)
trigger_process.start()
trigger_process.join()
while mfhclient_process.is_alive() or server_process.is_alive():
time.sleep(5)
else:
if args.updater:
# update.pull("origin", "master")
sys.stdout.flush()
os.execl(sys.executable, sys.executable, *sys.argv)
if __name__ == '__main__':
# Parse arguments
args = parse()
if args.c:
args.client = HONEYPORT
if args.s:
args.server = HIVEPORT
main()
|
d7a3bcf72df3cededc4220f46f976a0daef539a6
|
marvin/tests/__init__.py
|
marvin/tests/__init__.py
|
from marvin import create_app
import unittest
class AppCreationTest(unittest.TestCase):
def test_create_app(self):
app = create_app(MY_CONFIG_VALUE='foo')
self.assertEqual(app.config['MY_CONFIG_VALUE'], 'foo')
|
from marvin import create_app
import os
import tempfile
import unittest
class AppCreationTest(unittest.TestCase):
def setUp(self):
self.config_file = tempfile.NamedTemporaryFile(delete=False)
self.config_file.write('OTHER_CONFIG = "bar"'.encode('utf-8'))
self.config_file.close()
def tearDown(self):
os.remove(self.config_file.name)
def test_create_app(self):
app = create_app(MY_CONFIG_VALUE='foo')
self.assertEqual(app.config['MY_CONFIG_VALUE'], 'foo')
def test_create_app_with_config_file(self):
app = create_app(self.config_file.name)
self.assertEqual(app.config['OTHER_CONFIG'], 'bar')
def test_create_app_both(self):
app = create_app(self.config_file.name, EXTRA_PARAM='baz')
self.assertEqual(app.config['OTHER_CONFIG'], 'bar')
self.assertEqual(app.config['EXTRA_PARAM'], 'baz')
|
Add test for app creation with config file.
|
Add test for app creation with config file.
Also be explicit about encoding when writing to file.
|
Python
|
mit
|
streamr/marvin,streamr/marvin,streamr/marvin
|
from marvin import create_app
import unittest
class AppCreationTest(unittest.TestCase):
def test_create_app(self):
app = create_app(MY_CONFIG_VALUE='foo')
self.assertEqual(app.config['MY_CONFIG_VALUE'], 'foo')
Add test for app creation with config file.
Also be explicit about encoding when writing to file.
|
from marvin import create_app
import os
import tempfile
import unittest
class AppCreationTest(unittest.TestCase):
def setUp(self):
self.config_file = tempfile.NamedTemporaryFile(delete=False)
self.config_file.write('OTHER_CONFIG = "bar"'.encode('utf-8'))
self.config_file.close()
def tearDown(self):
os.remove(self.config_file.name)
def test_create_app(self):
app = create_app(MY_CONFIG_VALUE='foo')
self.assertEqual(app.config['MY_CONFIG_VALUE'], 'foo')
def test_create_app_with_config_file(self):
app = create_app(self.config_file.name)
self.assertEqual(app.config['OTHER_CONFIG'], 'bar')
def test_create_app_both(self):
app = create_app(self.config_file.name, EXTRA_PARAM='baz')
self.assertEqual(app.config['OTHER_CONFIG'], 'bar')
self.assertEqual(app.config['EXTRA_PARAM'], 'baz')
|
<commit_before>from marvin import create_app
import unittest
class AppCreationTest(unittest.TestCase):
def test_create_app(self):
app = create_app(MY_CONFIG_VALUE='foo')
self.assertEqual(app.config['MY_CONFIG_VALUE'], 'foo')
<commit_msg>Add test for app creation with config file.
Also be explicit about encoding when writing to file.<commit_after>
|
from marvin import create_app
import os
import tempfile
import unittest
class AppCreationTest(unittest.TestCase):
def setUp(self):
self.config_file = tempfile.NamedTemporaryFile(delete=False)
self.config_file.write('OTHER_CONFIG = "bar"'.encode('utf-8'))
self.config_file.close()
def tearDown(self):
os.remove(self.config_file.name)
def test_create_app(self):
app = create_app(MY_CONFIG_VALUE='foo')
self.assertEqual(app.config['MY_CONFIG_VALUE'], 'foo')
def test_create_app_with_config_file(self):
app = create_app(self.config_file.name)
self.assertEqual(app.config['OTHER_CONFIG'], 'bar')
def test_create_app_both(self):
app = create_app(self.config_file.name, EXTRA_PARAM='baz')
self.assertEqual(app.config['OTHER_CONFIG'], 'bar')
self.assertEqual(app.config['EXTRA_PARAM'], 'baz')
|
from marvin import create_app
import unittest
class AppCreationTest(unittest.TestCase):
def test_create_app(self):
app = create_app(MY_CONFIG_VALUE='foo')
self.assertEqual(app.config['MY_CONFIG_VALUE'], 'foo')
Add test for app creation with config file.
Also be explicit about encoding when writing to file.from marvin import create_app
import os
import tempfile
import unittest
class AppCreationTest(unittest.TestCase):
def setUp(self):
self.config_file = tempfile.NamedTemporaryFile(delete=False)
self.config_file.write('OTHER_CONFIG = "bar"'.encode('utf-8'))
self.config_file.close()
def tearDown(self):
os.remove(self.config_file.name)
def test_create_app(self):
app = create_app(MY_CONFIG_VALUE='foo')
self.assertEqual(app.config['MY_CONFIG_VALUE'], 'foo')
def test_create_app_with_config_file(self):
app = create_app(self.config_file.name)
self.assertEqual(app.config['OTHER_CONFIG'], 'bar')
def test_create_app_both(self):
app = create_app(self.config_file.name, EXTRA_PARAM='baz')
self.assertEqual(app.config['OTHER_CONFIG'], 'bar')
self.assertEqual(app.config['EXTRA_PARAM'], 'baz')
|
<commit_before>from marvin import create_app
import unittest
class AppCreationTest(unittest.TestCase):
def test_create_app(self):
app = create_app(MY_CONFIG_VALUE='foo')
self.assertEqual(app.config['MY_CONFIG_VALUE'], 'foo')
<commit_msg>Add test for app creation with config file.
Also be explicit about encoding when writing to file.<commit_after>from marvin import create_app
import os
import tempfile
import unittest
class AppCreationTest(unittest.TestCase):
def setUp(self):
self.config_file = tempfile.NamedTemporaryFile(delete=False)
self.config_file.write('OTHER_CONFIG = "bar"'.encode('utf-8'))
self.config_file.close()
def tearDown(self):
os.remove(self.config_file.name)
def test_create_app(self):
app = create_app(MY_CONFIG_VALUE='foo')
self.assertEqual(app.config['MY_CONFIG_VALUE'], 'foo')
def test_create_app_with_config_file(self):
app = create_app(self.config_file.name)
self.assertEqual(app.config['OTHER_CONFIG'], 'bar')
def test_create_app_both(self):
app = create_app(self.config_file.name, EXTRA_PARAM='baz')
self.assertEqual(app.config['OTHER_CONFIG'], 'bar')
self.assertEqual(app.config['EXTRA_PARAM'], 'baz')
|
4d6afb50094de9f80f686115edf9bc4abfc85bd3
|
webui/__init__.py
|
webui/__init__.py
|
#!/usr/bin/env python
#
# Copyright 2012 Ajay Narayan, Madhusudan C.S., Shobhit N.S.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
Add license to the package initalizer.
|
Add license to the package initalizer.
|
Python
|
apache-2.0
|
madhusudancs/sentiment-analyzer,madhusudancs/sentiment-analyzer
|
Add license to the package initalizer.
|
#!/usr/bin/env python
#
# Copyright 2012 Ajay Narayan, Madhusudan C.S., Shobhit N.S.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
<commit_before><commit_msg>Add license to the package initalizer.<commit_after>
|
#!/usr/bin/env python
#
# Copyright 2012 Ajay Narayan, Madhusudan C.S., Shobhit N.S.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
Add license to the package initalizer.#!/usr/bin/env python
#
# Copyright 2012 Ajay Narayan, Madhusudan C.S., Shobhit N.S.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
<commit_before><commit_msg>Add license to the package initalizer.<commit_after>#!/usr/bin/env python
#
# Copyright 2012 Ajay Narayan, Madhusudan C.S., Shobhit N.S.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
|
ec52babd52abce01873b8452f00b01c651c2deef
|
zappa/__init__.py
|
zappa/__init__.py
|
import sys
SUPPORTED_VERSIONS = [(2, 7), (3, 6), (3, 7)]
python_major_version = sys.version_info[0]
python_minor_version = sys.version_info[1]
if (python_major_version, python_minor_version) not in SUPPORTED_VERSIONS:
formatted_supported_versions = ['{}.{}'.format(mav, miv) for mav, miv in SUPPORTED_VERSIONS]
err_msg = 'This version of Python ({}.{}) is not supported!\n'.format(python_major_version, python_minor_version) +\
'Zappa (and AWS Lambda) support the following versions of Python: {}'.format(formatted_supported_versions)
raise RuntimeError(err_msg)
__version__ = '0.47.1'
|
import sys
SUPPORTED_VERSIONS = [(2, 7), (3, 6), (3, 7)]
if sys.version_info[:2] not in SUPPORTED_VERSIONS:
formatted_supported_versions = ['{}.{}'.format(*version) for version in SUPPORTED_VERSIONS]
err_msg = ('This version of Python ({}.{}) is not supported!\n'.format(*sys.version_info) +
'Zappa (and AWS Lambda) support the following versions of Python: {}'.format(formatted_supported_versions))
raise RuntimeError(err_msg)
__version__ = '0.47.1'
|
Simplify Python version detection and remove the backslash
|
Simplify Python version detection and remove the backslash
|
Python
|
mit
|
Miserlou/Zappa,Miserlou/Zappa
|
import sys
SUPPORTED_VERSIONS = [(2, 7), (3, 6), (3, 7)]
python_major_version = sys.version_info[0]
python_minor_version = sys.version_info[1]
if (python_major_version, python_minor_version) not in SUPPORTED_VERSIONS:
formatted_supported_versions = ['{}.{}'.format(mav, miv) for mav, miv in SUPPORTED_VERSIONS]
err_msg = 'This version of Python ({}.{}) is not supported!\n'.format(python_major_version, python_minor_version) +\
'Zappa (and AWS Lambda) support the following versions of Python: {}'.format(formatted_supported_versions)
raise RuntimeError(err_msg)
__version__ = '0.47.1'
Simplify Python version detection and remove the backslash
|
import sys
SUPPORTED_VERSIONS = [(2, 7), (3, 6), (3, 7)]
if sys.version_info[:2] not in SUPPORTED_VERSIONS:
formatted_supported_versions = ['{}.{}'.format(*version) for version in SUPPORTED_VERSIONS]
err_msg = ('This version of Python ({}.{}) is not supported!\n'.format(*sys.version_info) +
'Zappa (and AWS Lambda) support the following versions of Python: {}'.format(formatted_supported_versions))
raise RuntimeError(err_msg)
__version__ = '0.47.1'
|
<commit_before>import sys
SUPPORTED_VERSIONS = [(2, 7), (3, 6), (3, 7)]
python_major_version = sys.version_info[0]
python_minor_version = sys.version_info[1]
if (python_major_version, python_minor_version) not in SUPPORTED_VERSIONS:
formatted_supported_versions = ['{}.{}'.format(mav, miv) for mav, miv in SUPPORTED_VERSIONS]
err_msg = 'This version of Python ({}.{}) is not supported!\n'.format(python_major_version, python_minor_version) +\
'Zappa (and AWS Lambda) support the following versions of Python: {}'.format(formatted_supported_versions)
raise RuntimeError(err_msg)
__version__ = '0.47.1'
<commit_msg>Simplify Python version detection and remove the backslash<commit_after>
|
import sys
SUPPORTED_VERSIONS = [(2, 7), (3, 6), (3, 7)]
if sys.version_info[:2] not in SUPPORTED_VERSIONS:
formatted_supported_versions = ['{}.{}'.format(*version) for version in SUPPORTED_VERSIONS]
err_msg = ('This version of Python ({}.{}) is not supported!\n'.format(*sys.version_info) +
'Zappa (and AWS Lambda) support the following versions of Python: {}'.format(formatted_supported_versions))
raise RuntimeError(err_msg)
__version__ = '0.47.1'
|
import sys
SUPPORTED_VERSIONS = [(2, 7), (3, 6), (3, 7)]
python_major_version = sys.version_info[0]
python_minor_version = sys.version_info[1]
if (python_major_version, python_minor_version) not in SUPPORTED_VERSIONS:
formatted_supported_versions = ['{}.{}'.format(mav, miv) for mav, miv in SUPPORTED_VERSIONS]
err_msg = 'This version of Python ({}.{}) is not supported!\n'.format(python_major_version, python_minor_version) +\
'Zappa (and AWS Lambda) support the following versions of Python: {}'.format(formatted_supported_versions)
raise RuntimeError(err_msg)
__version__ = '0.47.1'
Simplify Python version detection and remove the backslashimport sys
SUPPORTED_VERSIONS = [(2, 7), (3, 6), (3, 7)]
if sys.version_info[:2] not in SUPPORTED_VERSIONS:
formatted_supported_versions = ['{}.{}'.format(*version) for version in SUPPORTED_VERSIONS]
err_msg = ('This version of Python ({}.{}) is not supported!\n'.format(*sys.version_info) +
'Zappa (and AWS Lambda) support the following versions of Python: {}'.format(formatted_supported_versions))
raise RuntimeError(err_msg)
__version__ = '0.47.1'
|
<commit_before>import sys
SUPPORTED_VERSIONS = [(2, 7), (3, 6), (3, 7)]
python_major_version = sys.version_info[0]
python_minor_version = sys.version_info[1]
if (python_major_version, python_minor_version) not in SUPPORTED_VERSIONS:
formatted_supported_versions = ['{}.{}'.format(mav, miv) for mav, miv in SUPPORTED_VERSIONS]
err_msg = 'This version of Python ({}.{}) is not supported!\n'.format(python_major_version, python_minor_version) +\
'Zappa (and AWS Lambda) support the following versions of Python: {}'.format(formatted_supported_versions)
raise RuntimeError(err_msg)
__version__ = '0.47.1'
<commit_msg>Simplify Python version detection and remove the backslash<commit_after>import sys
SUPPORTED_VERSIONS = [(2, 7), (3, 6), (3, 7)]
if sys.version_info[:2] not in SUPPORTED_VERSIONS:
formatted_supported_versions = ['{}.{}'.format(*version) for version in SUPPORTED_VERSIONS]
err_msg = ('This version of Python ({}.{}) is not supported!\n'.format(*sys.version_info) +
'Zappa (and AWS Lambda) support the following versions of Python: {}'.format(formatted_supported_versions))
raise RuntimeError(err_msg)
__version__ = '0.47.1'
|
e5c12ba8814d835a8e44b1e22316ae91aac3092f
|
run.py
|
run.py
|
from datetime import datetime
import app.models
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s %(funcName)s() - %(message)s')
def main():
logging.info('Start')
# TODO instantiate from JSON - perhaps add functionality in Software __init__()
sw = app.models.Software(software_id=None,
name='Climate Analysis',
description='Contrived code for Software Carpentry workshops',
version='1.0',
submitter='JSR',
submitted=datetime.utcnow(),
url='https://github.com/js-robinson/climate-analysis')
# process_software(sw)
if __name__ == '__main__':
main()
|
from app import app
# Run this to start the webapp
app.run(debug=True)
app.logger.debug("Started up...")
|
Move to web app from CLI
|
Move to web app from CLI
|
Python
|
bsd-3-clause
|
softwaresaved/software-assessment-framework,softwaresaved/software-assessment-framework
|
from datetime import datetime
import app.models
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s %(funcName)s() - %(message)s')
def main():
logging.info('Start')
# TODO instantiate from JSON - perhaps add functionality in Software __init__()
sw = app.models.Software(software_id=None,
name='Climate Analysis',
description='Contrived code for Software Carpentry workshops',
version='1.0',
submitter='JSR',
submitted=datetime.utcnow(),
url='https://github.com/js-robinson/climate-analysis')
# process_software(sw)
if __name__ == '__main__':
main()
Move to web app from CLI
|
from app import app
# Run this to start the webapp
app.run(debug=True)
app.logger.debug("Started up...")
|
<commit_before>from datetime import datetime
import app.models
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s %(funcName)s() - %(message)s')
def main():
logging.info('Start')
# TODO instantiate from JSON - perhaps add functionality in Software __init__()
sw = app.models.Software(software_id=None,
name='Climate Analysis',
description='Contrived code for Software Carpentry workshops',
version='1.0',
submitter='JSR',
submitted=datetime.utcnow(),
url='https://github.com/js-robinson/climate-analysis')
# process_software(sw)
if __name__ == '__main__':
main()
<commit_msg>Move to web app from CLI<commit_after>
|
from app import app
# Run this to start the webapp
app.run(debug=True)
app.logger.debug("Started up...")
|
from datetime import datetime
import app.models
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s %(funcName)s() - %(message)s')
def main():
logging.info('Start')
# TODO instantiate from JSON - perhaps add functionality in Software __init__()
sw = app.models.Software(software_id=None,
name='Climate Analysis',
description='Contrived code for Software Carpentry workshops',
version='1.0',
submitter='JSR',
submitted=datetime.utcnow(),
url='https://github.com/js-robinson/climate-analysis')
# process_software(sw)
if __name__ == '__main__':
main()
Move to web app from CLIfrom app import app
# Run this to start the webapp
app.run(debug=True)
app.logger.debug("Started up...")
|
<commit_before>from datetime import datetime
import app.models
import logging
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s %(funcName)s() - %(message)s')
def main():
logging.info('Start')
# TODO instantiate from JSON - perhaps add functionality in Software __init__()
sw = app.models.Software(software_id=None,
name='Climate Analysis',
description='Contrived code for Software Carpentry workshops',
version='1.0',
submitter='JSR',
submitted=datetime.utcnow(),
url='https://github.com/js-robinson/climate-analysis')
# process_software(sw)
if __name__ == '__main__':
main()
<commit_msg>Move to web app from CLI<commit_after>from app import app
# Run this to start the webapp
app.run(debug=True)
app.logger.debug("Started up...")
|
23419cf96eb2f9d45b60cfbc085d9a77190c40b5
|
django_lightweight_queue/job.py
|
django_lightweight_queue/job.py
|
import sys
import time
from django.utils import simplejson
from .utils import get_path, get_middleware
class Job(object):
def __init__(self, path, args, kwargs):
self.path = path
self.args = args
self.kwargs = kwargs
def run(self):
start = time.time()
middleware = get_middleware()
for instance in middleware:
if hasattr(instance, 'process_job'):
instance.process_job(self)
try:
result = self.get_fn().fn(*self.args, **self.kwargs)
time_taken = time.time() - start
for instance in middleware:
if hasattr(instance, 'process_result'):
instance.process_result(self, result, time_taken)
except Exception, exc:
time_taken = time.time() - start
exc_info = sys.exc_info()
for instance in middleware:
if hasattr(instance, 'process_exception'):
instance.process_exception(self, time_taken, *exc_info)
def validate(self):
# Ensure these execute without exception so that we cannot enqueue
# things that are impossible to dequeue.
self.get_fn()
self.to_json()
def get_fn(self):
return get_path(self.path)
def to_json(self):
return simplejson.dumps({
'path': self.path,
'args': self.args,
'kwargs': self.kwargs,
})
|
import sys
import time
from django.utils import simplejson
from .utils import get_path, get_middleware
class Job(object):
def __init__(self, path, args, kwargs):
self.path = path
self.args = args
self.kwargs = kwargs
@classmethod
def from_json(cls, json):
return cls(**simplejson.loads(json))
def run(self):
start = time.time()
middleware = get_middleware()
for instance in middleware:
if hasattr(instance, 'process_job'):
instance.process_job(self)
try:
result = self.get_fn().fn(*self.args, **self.kwargs)
time_taken = time.time() - start
for instance in middleware:
if hasattr(instance, 'process_result'):
instance.process_result(self, result, time_taken)
except Exception, exc:
time_taken = time.time() - start
exc_info = sys.exc_info()
for instance in middleware:
if hasattr(instance, 'process_exception'):
instance.process_exception(self, time_taken, *exc_info)
def validate(self):
# Ensure these execute without exception so that we cannot enqueue
# things that are impossible to dequeue.
self.get_fn()
self.to_json()
def get_fn(self):
return get_path(self.path)
def to_json(self):
return simplejson.dumps({
'path': self.path,
'args': self.args,
'kwargs': self.kwargs,
})
|
Add ability to generate a Job easily.
|
Add ability to generate a Job easily.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
Python
|
bsd-3-clause
|
prophile/django-lightweight-queue,prophile/django-lightweight-queue,thread/django-lightweight-queue,thread/django-lightweight-queue,lamby/django-lightweight-queue
|
import sys
import time
from django.utils import simplejson
from .utils import get_path, get_middleware
class Job(object):
def __init__(self, path, args, kwargs):
self.path = path
self.args = args
self.kwargs = kwargs
def run(self):
start = time.time()
middleware = get_middleware()
for instance in middleware:
if hasattr(instance, 'process_job'):
instance.process_job(self)
try:
result = self.get_fn().fn(*self.args, **self.kwargs)
time_taken = time.time() - start
for instance in middleware:
if hasattr(instance, 'process_result'):
instance.process_result(self, result, time_taken)
except Exception, exc:
time_taken = time.time() - start
exc_info = sys.exc_info()
for instance in middleware:
if hasattr(instance, 'process_exception'):
instance.process_exception(self, time_taken, *exc_info)
def validate(self):
# Ensure these execute without exception so that we cannot enqueue
# things that are impossible to dequeue.
self.get_fn()
self.to_json()
def get_fn(self):
return get_path(self.path)
def to_json(self):
return simplejson.dumps({
'path': self.path,
'args': self.args,
'kwargs': self.kwargs,
})
Add ability to generate a Job easily.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
import sys
import time
from django.utils import simplejson
from .utils import get_path, get_middleware
class Job(object):
def __init__(self, path, args, kwargs):
self.path = path
self.args = args
self.kwargs = kwargs
@classmethod
def from_json(cls, json):
return cls(**simplejson.loads(json))
def run(self):
start = time.time()
middleware = get_middleware()
for instance in middleware:
if hasattr(instance, 'process_job'):
instance.process_job(self)
try:
result = self.get_fn().fn(*self.args, **self.kwargs)
time_taken = time.time() - start
for instance in middleware:
if hasattr(instance, 'process_result'):
instance.process_result(self, result, time_taken)
except Exception, exc:
time_taken = time.time() - start
exc_info = sys.exc_info()
for instance in middleware:
if hasattr(instance, 'process_exception'):
instance.process_exception(self, time_taken, *exc_info)
def validate(self):
# Ensure these execute without exception so that we cannot enqueue
# things that are impossible to dequeue.
self.get_fn()
self.to_json()
def get_fn(self):
return get_path(self.path)
def to_json(self):
return simplejson.dumps({
'path': self.path,
'args': self.args,
'kwargs': self.kwargs,
})
|
<commit_before>import sys
import time
from django.utils import simplejson
from .utils import get_path, get_middleware
class Job(object):
def __init__(self, path, args, kwargs):
self.path = path
self.args = args
self.kwargs = kwargs
def run(self):
start = time.time()
middleware = get_middleware()
for instance in middleware:
if hasattr(instance, 'process_job'):
instance.process_job(self)
try:
result = self.get_fn().fn(*self.args, **self.kwargs)
time_taken = time.time() - start
for instance in middleware:
if hasattr(instance, 'process_result'):
instance.process_result(self, result, time_taken)
except Exception, exc:
time_taken = time.time() - start
exc_info = sys.exc_info()
for instance in middleware:
if hasattr(instance, 'process_exception'):
instance.process_exception(self, time_taken, *exc_info)
def validate(self):
# Ensure these execute without exception so that we cannot enqueue
# things that are impossible to dequeue.
self.get_fn()
self.to_json()
def get_fn(self):
return get_path(self.path)
def to_json(self):
return simplejson.dumps({
'path': self.path,
'args': self.args,
'kwargs': self.kwargs,
})
<commit_msg>Add ability to generate a Job easily.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>
|
import sys
import time
from django.utils import simplejson
from .utils import get_path, get_middleware
class Job(object):
def __init__(self, path, args, kwargs):
self.path = path
self.args = args
self.kwargs = kwargs
@classmethod
def from_json(cls, json):
return cls(**simplejson.loads(json))
def run(self):
start = time.time()
middleware = get_middleware()
for instance in middleware:
if hasattr(instance, 'process_job'):
instance.process_job(self)
try:
result = self.get_fn().fn(*self.args, **self.kwargs)
time_taken = time.time() - start
for instance in middleware:
if hasattr(instance, 'process_result'):
instance.process_result(self, result, time_taken)
except Exception, exc:
time_taken = time.time() - start
exc_info = sys.exc_info()
for instance in middleware:
if hasattr(instance, 'process_exception'):
instance.process_exception(self, time_taken, *exc_info)
def validate(self):
# Ensure these execute without exception so that we cannot enqueue
# things that are impossible to dequeue.
self.get_fn()
self.to_json()
def get_fn(self):
return get_path(self.path)
def to_json(self):
return simplejson.dumps({
'path': self.path,
'args': self.args,
'kwargs': self.kwargs,
})
|
import sys
import time
from django.utils import simplejson
from .utils import get_path, get_middleware
class Job(object):
def __init__(self, path, args, kwargs):
self.path = path
self.args = args
self.kwargs = kwargs
def run(self):
start = time.time()
middleware = get_middleware()
for instance in middleware:
if hasattr(instance, 'process_job'):
instance.process_job(self)
try:
result = self.get_fn().fn(*self.args, **self.kwargs)
time_taken = time.time() - start
for instance in middleware:
if hasattr(instance, 'process_result'):
instance.process_result(self, result, time_taken)
except Exception, exc:
time_taken = time.time() - start
exc_info = sys.exc_info()
for instance in middleware:
if hasattr(instance, 'process_exception'):
instance.process_exception(self, time_taken, *exc_info)
def validate(self):
# Ensure these execute without exception so that we cannot enqueue
# things that are impossible to dequeue.
self.get_fn()
self.to_json()
def get_fn(self):
return get_path(self.path)
def to_json(self):
return simplejson.dumps({
'path': self.path,
'args': self.args,
'kwargs': self.kwargs,
})
Add ability to generate a Job easily.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>import sys
import time
from django.utils import simplejson
from .utils import get_path, get_middleware
class Job(object):
def __init__(self, path, args, kwargs):
self.path = path
self.args = args
self.kwargs = kwargs
@classmethod
def from_json(cls, json):
return cls(**simplejson.loads(json))
def run(self):
start = time.time()
middleware = get_middleware()
for instance in middleware:
if hasattr(instance, 'process_job'):
instance.process_job(self)
try:
result = self.get_fn().fn(*self.args, **self.kwargs)
time_taken = time.time() - start
for instance in middleware:
if hasattr(instance, 'process_result'):
instance.process_result(self, result, time_taken)
except Exception, exc:
time_taken = time.time() - start
exc_info = sys.exc_info()
for instance in middleware:
if hasattr(instance, 'process_exception'):
instance.process_exception(self, time_taken, *exc_info)
def validate(self):
# Ensure these execute without exception so that we cannot enqueue
# things that are impossible to dequeue.
self.get_fn()
self.to_json()
def get_fn(self):
return get_path(self.path)
def to_json(self):
return simplejson.dumps({
'path': self.path,
'args': self.args,
'kwargs': self.kwargs,
})
|
<commit_before>import sys
import time
from django.utils import simplejson
from .utils import get_path, get_middleware
class Job(object):
def __init__(self, path, args, kwargs):
self.path = path
self.args = args
self.kwargs = kwargs
def run(self):
start = time.time()
middleware = get_middleware()
for instance in middleware:
if hasattr(instance, 'process_job'):
instance.process_job(self)
try:
result = self.get_fn().fn(*self.args, **self.kwargs)
time_taken = time.time() - start
for instance in middleware:
if hasattr(instance, 'process_result'):
instance.process_result(self, result, time_taken)
except Exception, exc:
time_taken = time.time() - start
exc_info = sys.exc_info()
for instance in middleware:
if hasattr(instance, 'process_exception'):
instance.process_exception(self, time_taken, *exc_info)
def validate(self):
# Ensure these execute without exception so that we cannot enqueue
# things that are impossible to dequeue.
self.get_fn()
self.to_json()
def get_fn(self):
return get_path(self.path)
def to_json(self):
return simplejson.dumps({
'path': self.path,
'args': self.args,
'kwargs': self.kwargs,
})
<commit_msg>Add ability to generate a Job easily.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>import sys
import time
from django.utils import simplejson
from .utils import get_path, get_middleware
class Job(object):
def __init__(self, path, args, kwargs):
self.path = path
self.args = args
self.kwargs = kwargs
@classmethod
def from_json(cls, json):
return cls(**simplejson.loads(json))
def run(self):
start = time.time()
middleware = get_middleware()
for instance in middleware:
if hasattr(instance, 'process_job'):
instance.process_job(self)
try:
result = self.get_fn().fn(*self.args, **self.kwargs)
time_taken = time.time() - start
for instance in middleware:
if hasattr(instance, 'process_result'):
instance.process_result(self, result, time_taken)
except Exception, exc:
time_taken = time.time() - start
exc_info = sys.exc_info()
for instance in middleware:
if hasattr(instance, 'process_exception'):
instance.process_exception(self, time_taken, *exc_info)
def validate(self):
# Ensure these execute without exception so that we cannot enqueue
# things that are impossible to dequeue.
self.get_fn()
self.to_json()
def get_fn(self):
return get_path(self.path)
def to_json(self):
return simplejson.dumps({
'path': self.path,
'args': self.args,
'kwargs': self.kwargs,
})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.