| commit (string, lengths 40–40) | old_file (string, lengths 4–118) | new_file (string, lengths 4–118) | old_contents (string, lengths 0–2.94k) | new_contents (string, lengths 1–4.43k) | subject (string, lengths 15–444) | message (string, lengths 16–3.45k) | lang (string, 1 class) | license (string, 13 classes) | repos (string, lengths 5–43.2k) | prompt (string, lengths 17–4.58k) | response (string, lengths 1–4.43k) | prompt_tagged (string, lengths 58–4.62k) | response_tagged (string, lengths 1–4.43k) | text (string, lengths 132–7.29k) | text_tagged (string, lengths 173–7.33k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
fa78c5b5442c904ba3888b858eb2c284f16664ed
|
pages/urls/page.py
|
pages/urls/page.py
|
from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = patterns('',
    url(r'', include(router.urls)),
)
|
from django.conf.urls import include, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = [
    url(r'', include(router.urls)),
]
|
Purge unnecessary patterns function from urls
|
Purge unnecessary patterns function from urls
|
Python
|
bsd-2-clause
|
incuna/feincms-pages-api
|
from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = patterns('',
    url(r'', include(router.urls)),
)
Purge unnecessary patterns function from urls
|
from django.conf.urls import include, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = [
    url(r'', include(router.urls)),
]
|
<commit_before>from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = patterns('',
    url(r'', include(router.urls)),
)
<commit_msg>Purge unnecessary patterns function from urls<commit_after>
|
from django.conf.urls import include, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = [
    url(r'', include(router.urls)),
]
|
from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = patterns('',
    url(r'', include(router.urls)),
)
Purge unnecessary patterns function from urls
from django.conf.urls import include, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = [
    url(r'', include(router.urls)),
]
|
<commit_before>from django.conf.urls import include, patterns, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = patterns('',
    url(r'', include(router.urls)),
)
<commit_msg>Purge unnecessary patterns function from urls<commit_after>from django.conf.urls import include, url
from rest_framework.routers import SimpleRouter
from .. import views
router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)
urlpatterns = [
    url(r'', include(router.urls)),
]
|
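The row above records the Django 1.8/1.10-era cleanup: `patterns()` was deprecated in Django 1.8 and removed in 1.10, so `urlpatterns` becomes a plain list. For context only, a minimal sketch of the same router include on later Django (2.0+), where `path()` and `re_path()` from `django.urls` in turn replace the old `url()` helper; this is our illustration, not part of the recorded commit:

```python
# Hedged sketch, not the recorded commit. Assumes Django >= 2.0, where
# django.urls.path()/re_path() replace the removed django.conf.urls.url().
from django.urls import include, path
from rest_framework.routers import SimpleRouter

from .. import views  # provides views.PageViewSet, as in the row above

router = SimpleRouter(trailing_slash=False)
router.register(r'pages', views.PageViewSet)

urlpatterns = [
    path('', include(router.urls)),  # mount all router-generated routes
]
```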
13f3d7d4a708cd05712b610d979dcf857ae85856
|
Agents/SentinelDefense.py
|
Agents/SentinelDefense.py
|
from pysc2.agents import base_agents
from pysc2.lib import actions
## SENTINEL FUNCTIONS
# Functions related with Hallucination
_HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id
_HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id
_HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id
_HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id
_HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id
_HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id
_HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id
_HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
_HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id
_HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id
# Functions
_NOOP = actions.FUNCTIONS.no_op.id
_SELECT_POINT = actions.FUNCTIONS.select_point.id
# Functions related with attack
|
from pysc2.agents import base_agents
from pysc2.lib import actions
Class Sentry():
    '''Defines how the sentry SC2 unit works'''
    def Force_Field():
        '''Function related with Force Field creation'''
        _FORCE_FIELD = actions.FUNCTIONS.Effect_ForceField_screen.id
    def Guardian_Shield():
        '''Function related with Shield creation'''
        _GUARD_FIELD = actions.FUNCTIONS.Effect_GuardianShield_quick.id
    def Hallucinations():
        '''Functions related with Hallucination'''
        _HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id
        _HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id
        _HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id
        _HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id
        _HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id
        _HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id
        _HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id
        _HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
        _HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id
        _HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id
    def Standard_Functions():
        '''Standard Functions related with movements and exploration '''
        _NOOP = actions.FUNCTIONS.no_op.id
        _SELECT_POINT = actions.FUNCTIONS.select_point.id
|
Define class sentry with main actions
|
Define class sentry with main actions
|
Python
|
apache-2.0
|
SoyGema/Startcraft_pysc2_minigames
|
from pysc2.agents import base_agents
from pysc2.lib import actions
## SENTINEL FUNCTIONS
# Functions related with Hallucination
_HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id
_HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id
_HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id
_HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id
_HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id
_HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id
_HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id
_HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
_HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id
_HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id
# Functions
_NOOP = actions.FUNCTIONS.no_op.id
_SELECT_POINT = actions.FUNCTIONS.select_point.id
# Functions related with attack
Define class sentry with main actions
|
from pysc2.agents import base_agents
from pysc2.lib import actions
Class Sentry():
    '''Defines how the sentry SC2 unit works'''
    def Force_Field():
        '''Function related with Force Field creation'''
        _FORCE_FIELD = actions.FUNCTIONS.Effect_ForceField_screen.id
    def Guardian_Shield():
        '''Function related with Shield creation'''
        _GUARD_FIELD = actions.FUNCTIONS.Effect_GuardianShield_quick.id
    def Hallucinations():
        '''Functions related with Hallucination'''
        _HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id
        _HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id
        _HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id
        _HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id
        _HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id
        _HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id
        _HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id
        _HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
        _HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id
        _HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id
    def Standard_Functions():
        '''Standard Functions related with movements and exploration '''
        _NOOP = actions.FUNCTIONS.no_op.id
        _SELECT_POINT = actions.FUNCTIONS.select_point.id
|
<commit_before>
from pysc2.agents import base_agents
from pysc2.lib import actions
## SENTINEL FUNCTIONS
# Functions related with Hallucination
_HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id
_HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id
_HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id
_HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id
_HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id
_HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id
_HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id
_HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
_HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id
_HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id
# Functions
_NOOP = actions.FUNCTIONS.no_op.id
_SELECT_POINT = actions.FUNCTIONS.select_point.id
# Functions related with attack
<commit_msg>Define class sentry with main actions<commit_after>
|
from pysc2.agents import base_agents
from pysc2.lib import actions
Class Sentry():
    '''Defines how the sentry SC2 unit works'''
    def Force_Field():
        '''Function related with Force Field creation'''
        _FORCE_FIELD = actions.FUNCTIONS.Effect_ForceField_screen.id
    def Guardian_Shield():
        '''Function related with Shield creation'''
        _GUARD_FIELD = actions.FUNCTIONS.Effect_GuardianShield_quick.id
    def Hallucinations():
        '''Functions related with Hallucination'''
        _HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id
        _HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id
        _HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id
        _HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id
        _HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id
        _HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id
        _HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id
        _HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
        _HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id
        _HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id
    def Standard_Functions():
        '''Standard Functions related with movements and exploration '''
        _NOOP = actions.FUNCTIONS.no_op.id
        _SELECT_POINT = actions.FUNCTIONS.select_point.id
|
from pysc2.agents import base_agents
from pysc2.lib import actions
## SENTINEL FUNCTIONS
# Functions related with Hallucination
_HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id
_HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id
_HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id
_HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id
_HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id
_HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id
_HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id
_HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
_HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id
_HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id
# Functions
_NOOP = actions.FUNCTIONS.no_op.id
_SELECT_POINT = actions.FUNCTIONS.select_point.id
# Functions related with attack
Define class sentry with main actions
from pysc2.agents import base_agents
from pysc2.lib import actions
Class Sentry():
    '''Defines how the sentry SC2 unit works'''
    def Force_Field():
        '''Function related with Force Field creation'''
        _FORCE_FIELD = actions.FUNCTIONS.Effect_ForceField_screen.id
    def Guardian_Shield():
        '''Function related with Shield creation'''
        _GUARD_FIELD = actions.FUNCTIONS.Effect_GuardianShield_quick.id
    def Hallucinations():
        '''Functions related with Hallucination'''
        _HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id
        _HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id
        _HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id
        _HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id
        _HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id
        _HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id
        _HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id
        _HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
        _HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id
        _HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id
    def Standard_Functions():
        '''Standard Functions related with movements and exploration '''
        _NOOP = actions.FUNCTIONS.no_op.id
        _SELECT_POINT = actions.FUNCTIONS.select_point.id
|
<commit_before>
from pysc2.agents import base_agents
from pysc2.lib import actions
## SENTINEL FUNCTIONS
# Functions related with Hallucination
_HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id
_HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id
_HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id
_HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id
_HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id
_HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id
_HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id
_HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
_HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id
_HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id
# Functions
_NOOP = actions.FUNCTIONS.no_op.id
_SELECT_POINT = actions.FUNCTIONS.select_point.id
# Functions related with attack
<commit_msg>Define class sentry with main actions<commit_after>
from pysc2.agents import base_agents
from pysc2.lib import actions
Class Sentry():
    '''Defines how the sentry SC2 unit works'''
    def Force_Field():
        '''Function related with Force Field creation'''
        _FORCE_FIELD = actions.FUNCTIONS.Effect_ForceField_screen.id
    def Guardian_Shield():
        '''Function related with Shield creation'''
        _GUARD_FIELD = actions.FUNCTIONS.Effect_GuardianShield_quick.id
    def Hallucinations():
        '''Functions related with Hallucination'''
        _HAL_ADEPT = actions.FUNCTIONS.Hallucination_Adept_quick.id
        _HAL_ARCHON = actions.FUNCTIONS.Hallucination_Archon_quick.id
        _HAL_COL = actions.FUNCTIONS.Hallucination_Colossus_quick.id
        _HAL_DISRUP = actions.FUNCTIONS.Hallucination_Disruptor_quick.id
        _HAL_HIGTEM = actions.FUNCTIONS.Hallucination_HighTemplar_quick.id
        _HAL_IMN = actions.FUNCTIONS.Hallucination_Immortal_quick.id
        _HAL_PHOENIX = actions.FUNCTIONS.Hallucination_Phoenix_quick.id
        _HAL_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
        _HAL_VOIDRAID = actions.FUNCTIONS.Hallucination_VoidRay_quick.id
        _HAL_ZEALOT = actions.FUNCTIONS.Hallucination_Zealot_quick.id
    def Standard_Functions():
        '''Standard Functions related with movements and exploration '''
        _NOOP = actions.FUNCTIONS.no_op.id
        _SELECT_POINT = actions.FUNCTIONS.select_point.id
|
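Note that the `new_contents` recorded above is not actually valid Python: `Class` is capitalized (a `SyntaxError`) and the methods take no `self`; the import also names `base_agents`, while the pysc2 package ships `pysc2.agents.base_agent`. A hedged, syntactically valid sketch of roughly the same grouping, using only action ids that appear in the row:

```python
# Hedged sketch, not the recorded commit: a valid-Python take on the
# Sentry grouping. The action ids below are the ones the row itself uses.
from pysc2.lib import actions

class Sentry:
    '''Groups the pysc2 action ids the sentry unit relies on.'''

    # Resolved once at import time, as plain class attributes.
    FORCE_FIELD = actions.FUNCTIONS.Effect_ForceField_screen.id
    GUARDIAN_SHIELD = actions.FUNCTIONS.Effect_GuardianShield_quick.id
    HALLUCINATE_STALKER = actions.FUNCTIONS.Hallucination_Stalker_quick.id
    NOOP = actions.FUNCTIONS.no_op.id
    SELECT_POINT = actions.FUNCTIONS.select_point.id
```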
1c01b9e794445242c450534d1615a9dc755b89da
|
randcat.py
|
randcat.py
|
import random
random.seed()
while True:
    print(chr(random.getrandbits(8)), end='')
|
#! /usr/bin/python3
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
    print(chr(random.getrandbits(8)), end='')
|
Add some comments and a shebang on top.
|
Add some comments and a shebang on top.
|
Python
|
apache-2.0
|
Tombert/RandCat
|
import random
random.seed()
while True:
    print(chr(random.getrandbits(8)), end='')
Add some comments and a shebang on top.
|
#! /usr/bin/python3
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
    print(chr(random.getrandbits(8)), end='')
|
<commit_before>import random
random.seed()
while True:
    print(chr(random.getrandbits(8)), end='')
<commit_msg>Add some comments and a shebang on top.<commit_after>
|
#! /usr/bin/python3
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
    print(chr(random.getrandbits(8)), end='')
|
import random
random.seed()
while True:
    print(chr(random.getrandbits(8)), end='')
Add some comments and a shebang on top.
#! /usr/bin/python3
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
    print(chr(random.getrandbits(8)), end='')
|
<commit_before>import random
random.seed()
while True:
    print(chr(random.getrandbits(8)), end='')
<commit_msg>Add some comments and a shebang on top.<commit_after>#! /usr/bin/python3
import random
random.seed() # this initializes with the Date, which I think is a novel enough seed
while True: # if we're going with a mimicing of cat /dev/random, it'll pretty much just go until it's killed
    print(chr(random.getrandbits(8)), end='')
|
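A side note on the row above: `random.seed()` with no argument seeds from the operating system's entropy source when available (falling back to the current time), and `chr(random.getrandbits(8))` prints text rather than raw bytes, so control characters end up encoded. A hedged alternative sketch, closer in spirit to `cat /dev/urandom`:

```python
#!/usr/bin/env python3
# Hedged alternative sketch, not the recorded commit: stream raw random
# bytes to stdout instead of printing chr() text.
import os
import sys

while True:  # like the original, runs until the process is killed
    sys.stdout.buffer.write(os.urandom(1024))
```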
f897c6e6e592990291983b29324a1e85fc636b2a
|
python/setup.py
|
python/setup.py
|
from setuptools import setup
import sys
sys.path += ['src']
setup(
name = "epidb-client",
version = '0.1.2',
url = 'http://www.epiwork.eu/',
description = 'EPIWork Database - Client Code',
author = 'Fajran Iman Rusadi',
package_dir = {'': 'src'},
packages = ['epidb_client'],
install_requires = ['simplejson'],
test_suite = 'epidb_client.tests',
)
|
from setuptools import setup
import sys
sys.path += ['src']
setup(
name = "epidb-client",
version = '0.1.2',
url = 'http://www.epiwork.eu/',
description = 'EPIWork Database - Client Code',
author = 'Fajran Iman Rusadi',
package_dir = {'': 'src'},
packages = ['epidb_client'],
requires = ['simplejson'],
test_suite = 'epidb_client.tests',
)
|
Move simplejson to requires, not install_requires.
|
Move simplejson to requires, not install_requires.
|
Python
|
agpl-3.0
|
ISIFoundation/influenzanet-epidb-client
|
from setuptools import setup
import sys
sys.path += ['src']
setup(
name = "epidb-client",
version = '0.1.2',
url = 'http://www.epiwork.eu/',
description = 'EPIWork Database - Client Code',
author = 'Fajran Iman Rusadi',
package_dir = {'': 'src'},
packages = ['epidb_client'],
install_requires = ['simplejson'],
test_suite = 'epidb_client.tests',
)
Move simplejson to requires, not install_requires.
|
from setuptools import setup
import sys
sys.path += ['src']
setup(
name = "epidb-client",
version = '0.1.2',
url = 'http://www.epiwork.eu/',
description = 'EPIWork Database - Client Code',
author = 'Fajran Iman Rusadi',
package_dir = {'': 'src'},
packages = ['epidb_client'],
requires = ['simplejson'],
test_suite = 'epidb_client.tests',
)
|
<commit_before>from setuptools import setup
import sys
sys.path += ['src']
setup(
name = "epidb-client",
version = '0.1.2',
url = 'http://www.epiwork.eu/',
description = 'EPIWork Database - Client Code',
author = 'Fajran Iman Rusadi',
package_dir = {'': 'src'},
packages = ['epidb_client'],
install_requires = ['simplejson'],
test_suite = 'epidb_client.tests',
)
<commit_msg>Move simplejson to requires, not install_requires.<commit_after>
|
from setuptools import setup
import sys
sys.path += ['src']
setup(
name = "epidb-client",
version = '0.1.2',
url = 'http://www.epiwork.eu/',
description = 'EPIWork Database - Client Code',
author = 'Fajran Iman Rusadi',
package_dir = {'': 'src'},
packages = ['epidb_client'],
requires = ['simplejson'],
test_suite = 'epidb_client.tests',
)
|
from setuptools import setup
import sys
sys.path += ['src']
setup(
name = "epidb-client",
version = '0.1.2',
url = 'http://www.epiwork.eu/',
description = 'EPIWork Database - Client Code',
author = 'Fajran Iman Rusadi',
package_dir = {'': 'src'},
packages = ['epidb_client'],
install_requires = ['simplejson'],
test_suite = 'epidb_client.tests',
)
Move simplejson to requires, not install_requires.from setuptools import setup
import sys
sys.path += ['src']
setup(
name = "epidb-client",
version = '0.1.2',
url = 'http://www.epiwork.eu/',
description = 'EPIWork Database - Client Code',
author = 'Fajran Iman Rusadi',
package_dir = {'': 'src'},
packages = ['epidb_client'],
requires = ['simplejson'],
test_suite = 'epidb_client.tests',
)
|
<commit_before>from setuptools import setup
import sys
sys.path += ['src']
setup(
name = "epidb-client",
version = '0.1.2',
url = 'http://www.epiwork.eu/',
description = 'EPIWork Database - Client Code',
author = 'Fajran Iman Rusadi',
package_dir = {'': 'src'},
packages = ['epidb_client'],
install_requires = ['simplejson'],
test_suite = 'epidb_client.tests',
)
<commit_msg>Move simplejson to requires, not install_requires.<commit_after>from setuptools import setup
import sys
sys.path += ['src']
setup(
name = "epidb-client",
version = '0.1.2',
url = 'http://www.epiwork.eu/',
description = 'EPIWork Database - Client Code',
author = 'Fajran Iman Rusadi',
package_dir = {'': 'src'},
packages = ['epidb_client'],
requires = ['simplejson'],
test_suite = 'epidb_client.tests',
)
|
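For context on the row above: `install_requires` is the setuptools field that pip actually resolves and installs, while `requires` is older distutils metadata (PEP 314) that installs nothing, so this commit effectively stops declaring `simplejson` as an installable dependency. A minimal sketch contrasting the two fields; the package name is made up:

```python
# Hedged sketch with a hypothetical package name, contrasting the fields.
from setuptools import setup

setup(
    name='example-pkg',               # hypothetical
    version='0.1',
    install_requires=['simplejson'],  # pip resolves and installs this
    requires=['simplejson'],          # distutils metadata only; pip ignores it
)
```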
28940582fcff57b66e702dfecfd96e83725fbab0
|
leisure/__init__.py
|
leisure/__init__.py
|
# -*- coding: utf-8 -*-
"""
leisure
~~~~~~~~
Leisure a local job runner for Disco based project.
It provides a useful method for running your disco project without
needing a full disco cluster. This makes it a snap to develop and
debug jobs on your development machine.
To use, simply execute your disco script using the leisure
command like so:
$ leisure <path to script>/word_count.py
Leisure monkey patches all network calls to the Disco/DDFS master
so that it can intercept and execute them locally. The
worker itself is executed as a subprocess and communicated to via
the Disco Worker protocol
http://discoproject.org/doc/disco/howto/worker.html
:copyright: (c) 2011 by triv.io, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
from .disco import run_script
from . import shuffle
import tempfile
def main():
    script = sys.argv[1]
    if len(sys.argv) == 3:
        data_root = sys.argv[2]
    else:
        data_root = tempfile.mkdtemp()
    run_script(script, data_root)
if __name__ == "__main__":
    main()
|
# -*- coding: utf-8 -*-
"""
leisure
~~~~~~~~
Leisure a local job runner for Disco based project.
It provides a useful method for running your disco project without
needing a full disco cluster. This makes it a snap to develop and
debug jobs on your development machine.
To use, simply execute your disco script using the leisure
command like so:
$ leisure <path to script>/word_count.py
Leisure monkey patches all network calls to the Disco/DDFS master
so that it can intercept and execute them locally. The
worker itself is executed as a subprocess and communicated to via
the Disco Worker protocol
http://discoproject.org/doc/disco/howto/worker.html
:copyright: (c) 2011 by triv.io, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
import os
from .disco import run_script
from . import shuffle
import tempfile
def main():
    script = sys.argv[1]
    script_dir = os.path.abspath(os.path.dirname(script))
    if script_dir not in [os.path.abspath(p) for p in sys.path]:
        sys.path.append(script_dir)
    if len(sys.argv) == 3:
        data_root = sys.argv[2]
    else:
        data_root = tempfile.mkdtemp()
    run_script(script, data_root)
if __name__ == "__main__":
    main()
|
Add script's path to the python path
|
Add script's path to the python path
|
Python
|
mit
|
trivio/leisure
|
# -*- coding: utf-8 -*-
"""
leisure
~~~~~~~~
Leisure a local job runner for Disco based project.
It provides a useful method for running your disco project without
needing a full disco cluster. This makes it a snap to develop and
debug jobs on your development machine.
To use, simply execute your disco script using the leisure
command like so:
$ leisure <path to script>/word_count.py
Leisure monkey patches all network calls to the Disco/DDFS master
so that it can intercept and execute them locally. The
worker itself is executed as a subprocess and communicated to via
the Disco Worker protocol
http://discoproject.org/doc/disco/howto/worker.html
:copyright: (c) 2011 by triv.io, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
from .disco import run_script
from . import shuffle
import tempfile
def main():
    script = sys.argv[1]
    if len(sys.argv) == 3:
        data_root = sys.argv[2]
    else:
        data_root = tempfile.mkdtemp()
    run_script(script, data_root)
if __name__ == "__main__":
    main()
Add script's path to the python path
|
# -*- coding: utf-8 -*-
"""
leisure
~~~~~~~~
Leisure a local job runner for Disco based project.
It provides a useful method for running your disco project without
needing a full disco cluster. This makes it a snap to develop and
debug jobs on your development machine.
To use, simply execute your disco script using the leisure
command like so:
$ leisure <path to script>/word_count.py
Leisure monkey patches all network calls to the Disco/DDFS master
so that it can intercept and execute them locally. The
worker itself is executed as a subprocess and communicated to via
the Disco Worker protocol
http://discoproject.org/doc/disco/howto/worker.html
:copyright: (c) 2011 by triv.io, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
import os
from .disco import run_script
from . import shuffle
import tempfile
def main():
    script = sys.argv[1]
    script_dir = os.path.abspath(os.path.dirname(script))
    if script_dir not in [os.path.abspath(p) for p in sys.path]:
        sys.path.append(script_dir)
    if len(sys.argv) == 3:
        data_root = sys.argv[2]
    else:
        data_root = tempfile.mkdtemp()
    run_script(script, data_root)
if __name__ == "__main__":
    main()
|
<commit_before># -*- coding: utf-8 -*-
"""
leisure
~~~~~~~~
Leisure a local job runner for Disco based project.
It provides a useful method for running your disco project without
needing a full disco cluster. This makes it a snap to develop and
debug jobs on your development machine.
To use, simply execute your disco script using the leisure
command like so:
$ leisure <path to script>/word_count.py
Leisure monkey patches all network calls to the Disco/DDFS master
so that it can intercept and execute them locally. The
worker itself is executed as a subprocess and communicated to via
the Disco Worker protocol
http://discoproject.org/doc/disco/howto/worker.html
:copyright: (c) 2011 by triv.io, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
from .disco import run_script
from . import shuffle
import tempfile
def main():
    script = sys.argv[1]
    if len(sys.argv) == 3:
        data_root = sys.argv[2]
    else:
        data_root = tempfile.mkdtemp()
    run_script(script, data_root)
if __name__ == "__main__":
    main()
<commit_msg>Add script's path to the python path<commit_after>
|
# -*- coding: utf-8 -*-
"""
leisure
~~~~~~~~
Leisure a local job runner for Disco based project.
It provides a useful method for running your disco project without
needing a full disco cluster. This makes it a snap to develop and
debug jobs on your development machine.
To use, simply execute your disco script using the leisure
command like so:
$ leisure <path to script>/word_count.py
Leisure monkey patches all network calls to the Disco/DDFS master
so that it can intercept and execute them locally. The
worker itself is executed as a subprocess and communicated to via
the Disco Worker protocol
http://discoproject.org/doc/disco/howto/worker.html
:copyright: (c) 2011 by triv.io, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
import os
from .disco import run_script
from . import shuffle
import tempfile
def main():
    script = sys.argv[1]
    script_dir = os.path.abspath(os.path.dirname(script))
    if script_dir not in [os.path.abspath(p) for p in sys.path]:
        sys.path.append(script_dir)
    if len(sys.argv) == 3:
        data_root = sys.argv[2]
    else:
        data_root = tempfile.mkdtemp()
    run_script(script, data_root)
if __name__ == "__main__":
    main()
|
# -*- coding: utf-8 -*-
"""
leisure
~~~~~~~~
Leisure a local job runner for Disco based project.
It provides a useful method for running your disco project without
needing a full disco cluster. This makes it a snap to develop and
debug jobs on your development machine.
To use, simply execute your disco script using the leisure
command like so:
$ leisure <path to script>/word_count.py
Leisure monkey patches all network calls to the Disco/DDFS master
so that it can intercept and execute them locally. The
worker itself is executed as a subprocess and communicated to via
the Disco Worker protocol
http://discoproject.org/doc/disco/howto/worker.html
:copyright: (c) 2011 by triv.io, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
from .disco import run_script
from . import shuffle
import tempfile
def main():
    script = sys.argv[1]
    if len(sys.argv) == 3:
        data_root = sys.argv[2]
    else:
        data_root = tempfile.mkdtemp()
    run_script(script, data_root)
if __name__ == "__main__":
    main()
Add script's path to the python path
# -*- coding: utf-8 -*-
"""
leisure
~~~~~~~~
Leisure a local job runner for Disco based project.
It provides a useful method for running your disco project without
needing a full disco cluster. This makes it a snap to develop and
debug jobs on your development machine.
To use, simply execute your disco script using the leisure
command like so:
$ leisure <path to script>/word_count.py
Leisure monkey patches all network calls to the Disco/DDFS master
so that it can intercept and execute them locally. The
worker itself is executed as a subprocess and communicated to via
the Disco Worker protocol
http://discoproject.org/doc/disco/howto/worker.html
:copyright: (c) 2011 by triv.io, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
import os
from .disco import run_script
from . import shuffle
import tempfile
def main():
    script = sys.argv[1]
    script_dir = os.path.abspath(os.path.dirname(script))
    if script_dir not in [os.path.abspath(p) for p in sys.path]:
        sys.path.append(script_dir)
    if len(sys.argv) == 3:
        data_root = sys.argv[2]
    else:
        data_root = tempfile.mkdtemp()
    run_script(script, data_root)
if __name__ == "__main__":
    main()
|
<commit_before># -*- coding: utf-8 -*-
"""
leisure
~~~~~~~~
Leisure a local job runner for Disco based project.
It provides a useful method for running your disco project without
needing a full disco cluster. This makes it a snap to develop and
debug jobs on your development machine.
To use, simply execute your disco script using the leisure
command like so:
$ leisure <path to script>/word_count.py
Leisure monkey patches all network calls to the Disco/DDFS master
so that it can intercept and execute them locally. The
worker itself is executed as a subprocess and communicated to via
the Disco Worker protocol
http://discoproject.org/doc/disco/howto/worker.html
:copyright: (c) 2011 by triv.io, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
from .disco import run_script
from . import shuffle
import tempfile
def main():
    script = sys.argv[1]
    if len(sys.argv) == 3:
        data_root = sys.argv[2]
    else:
        data_root = tempfile.mkdtemp()
    run_script(script, data_root)
if __name__ == "__main__":
    main()
<commit_msg>Add script's path to the python path<commit_after># -*- coding: utf-8 -*-
"""
leisure
~~~~~~~~
Leisure a local job runner for Disco based project.
It provides a useful method for running your disco project without
needing a full disco cluster. This makes it a snap to develop and
debug jobs on your development machine.
To use, simply execute your disco script using the leisure
command like so:
$ leisure <path to script>/word_count.py
Leisure monkey patches all network calls to the Disco/DDFS master
so that it can intercept and execute them locally. The
worker itself is executed as a subprocess and communicated to via
the Disco Worker protocol
http://discoproject.org/doc/disco/howto/worker.html
:copyright: (c) 2011 by triv.io, see AUTHORS for more details.
:license: MIT, see LICENSE for more details.
"""
from __future__ import absolute_import
import sys
import os
from .disco import run_script
from . import shuffle
import tempfile
def main():
    script = sys.argv[1]
    script_dir = os.path.abspath(os.path.dirname(script))
    if script_dir not in [os.path.abspath(p) for p in sys.path]:
        sys.path.append(script_dir)
    if len(sys.argv) == 3:
        data_root = sys.argv[2]
    else:
        data_root = tempfile.mkdtemp()
    run_script(script, data_root)
if __name__ == "__main__":
    main()
|
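The change recorded above is a common idiom when executing a user-supplied script: put the script's own directory on `sys.path` so its sibling modules import cleanly, while guarding against duplicate entries. A standalone sketch of just that guard; the helper name is ours, for illustration:

```python
# Hedged sketch of the guard the commit adds; the function name is made up.
import os
import sys

def ensure_script_dir_on_path(script):
    """Make the directory containing `script` importable, at most once."""
    script_dir = os.path.abspath(os.path.dirname(script))
    if script_dir not in [os.path.abspath(p) for p in sys.path]:
        sys.path.append(script_dir)
```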
589bc468783e6c7620c3be21195fdbe88e796234
|
linguist/helpers.py
|
linguist/helpers.py
|
# -*- coding: utf-8 -*-
import collections
import itertools
from . import utils
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations()
|
# -*- coding: utf-8 -*-
import collections
import itertools
from . import utils
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
if not isinstance(instances, collections.Iterable):
instances = [instances]
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations()
|
Fix prefetch_translations() -- be sure we only deal with iteratables.
|
Fix prefetch_translations() -- be sure we only deal with iteratables.
|
Python
|
mit
|
ulule/django-linguist
|
# -*- coding: utf-8 -*-
import collections
import itertools
from . import utils
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations()
Fix prefetch_translations() -- be sure we only deal with iteratables.
|
# -*- coding: utf-8 -*-
import collections
import itertools
from . import utils
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
if not isinstance(instances, collections.Iterable):
instances = [instances]
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations()
|
<commit_before># -*- coding: utf-8 -*-
import collections
import itertools
from . import utils
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations()
<commit_msg>Fix prefetch_translations() -- be sure we only deal with iteratables.<commit_after>
|
# -*- coding: utf-8 -*-
import collections
import itertools
from . import utils
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
if not isinstance(instances, collections.Iterable):
instances = [instances]
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations()
|
# -*- coding: utf-8 -*-
import collections
import itertools
from . import utils
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations()
Fix prefetch_translations() -- be sure we only deal with iteratables.# -*- coding: utf-8 -*-
import collections
import itertools
from . import utils
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
if not isinstance(instances, collections.Iterable):
instances = [instances]
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations()
|
<commit_before># -*- coding: utf-8 -*-
import collections
import itertools
from . import utils
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations()
<commit_msg>Fix prefetch_translations() -- be sure we only deal with iteratables.<commit_after># -*- coding: utf-8 -*-
import collections
import itertools
from . import utils
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
if not isinstance(instances, collections.Iterable):
instances = [instances]
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations()
|
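One caveat on the fix recorded above: `collections.Iterable` is the pre-3.3 location of that ABC; it was deprecated in favor of `collections.abc.Iterable` and removed outright in Python 3.10. A hedged sketch of the same single-vs-many guard against the modern location:

```python
# Hedged sketch, not the recorded commit: the same guard via collections.abc,
# which is required on Python 3.10+ where collections.Iterable is gone.
from collections.abc import Iterable

def as_iterable(instances):
    """Wrap a lone instance in a list so callers can always iterate."""
    if not isinstance(instances, Iterable):
        instances = [instances]
    return instances
```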
2f8a2fdad8deb96b7b3c971baf866f248c23fdda
|
madam_rest/views.py
|
madam_rest/views.py
|
from flask import jsonify, url_for
from madam_rest import app, asset_storage
@app.route('/assets/')
def assets_retrieve():
    assets = [asset_key for asset_key in asset_storage]
    return jsonify({
        "data": assets,
        "meta": {
            "count": len(assets)
        }
    })
@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
    asset = asset_storage[asset_key]
    return jsonify({
        "links": {
            "self": url_for(asset_retrieve, asset_key=asset_key)
        },
        "meta": {} # TODO: _mutable(asset.metadata)
    })
|
from datetime import datetime
from flask import jsonify, url_for
from fractions import Fraction
from frozendict import frozendict
from madam_rest import app, asset_storage
def _serializable(value):
"""
Utility function to convert data structures with immutable types to
mutable, serializable data structures.
:param value: data structure with immutable types
:return: mutable, serializable data structure
"""
if isinstance(value, (tuple, set, frozenset)):
return [_serializable(v) for v in value]
elif isinstance(value, frozendict):
return {k: _serializable(v) for k, v in value.items()}
elif isinstance(value, datetime):
return value.isoformat()
elif isinstance(value, Fraction):
return float(value)
return value
@app.route('/assets/')
def assets_retrieve():
assets = [asset_key for asset_key in asset_storage]
return jsonify({
"data": assets,
"meta": {
"count": len(assets)
}
})
@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
asset = asset_storage[asset_key]
return jsonify({
"links": {
"self": url_for(asset_retrieve, asset_key=asset_key)
},
"meta": _serializable(asset.metadata)
})
|
Improve serialization of asset metadata.
|
Improve serialization of asset metadata.
|
Python
|
agpl-3.0
|
eseifert/madam-rest
|
from flask import jsonify, url_for
from madam_rest import app, asset_storage
@app.route('/assets/')
def assets_retrieve():
    assets = [asset_key for asset_key in asset_storage]
    return jsonify({
        "data": assets,
        "meta": {
            "count": len(assets)
        }
    })
@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
    asset = asset_storage[asset_key]
    return jsonify({
        "links": {
            "self": url_for(asset_retrieve, asset_key=asset_key)
        },
        "meta": {} # TODO: _mutable(asset.metadata)
    })
Improve serialization of asset metadata.
|
from datetime import datetime
from flask import jsonify, url_for
from fractions import Fraction
from frozendict import frozendict
from madam_rest import app, asset_storage
def _serializable(value):
"""
Utility function to convert data structures with immutable types to
mutable, serializable data structures.
:param value: data structure with immutable types
:return: mutable, serializable data structure
"""
if isinstance(value, (tuple, set, frozenset)):
return [_serializable(v) for v in value]
elif isinstance(value, frozendict):
return {k: _serializable(v) for k, v in value.items()}
elif isinstance(value, datetime):
return value.isoformat()
elif isinstance(value, Fraction):
return float(value)
return value
@app.route('/assets/')
def assets_retrieve():
assets = [asset_key for asset_key in asset_storage]
return jsonify({
"data": assets,
"meta": {
"count": len(assets)
}
})
@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
asset = asset_storage[asset_key]
return jsonify({
"links": {
"self": url_for(asset_retrieve, asset_key=asset_key)
},
"meta": _serializable(asset.metadata)
})
|
<commit_before>from flask import jsonify, url_for
from madam_rest import app, asset_storage
@app.route('/assets/')
def assets_retrieve():
    assets = [asset_key for asset_key in asset_storage]
    return jsonify({
        "data": assets,
        "meta": {
            "count": len(assets)
        }
    })
@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
    asset = asset_storage[asset_key]
    return jsonify({
        "links": {
            "self": url_for(asset_retrieve, asset_key=asset_key)
        },
        "meta": {} # TODO: _mutable(asset.metadata)
    })
<commit_msg>Improve serialization of asset metadata.<commit_after>
|
from datetime import datetime
from flask import jsonify, url_for
from fractions import Fraction
from frozendict import frozendict
from madam_rest import app, asset_storage
def _serializable(value):
"""
Utility function to convert data structures with immutable types to
mutable, serializable data structures.
:param value: data structure with immutable types
:return: mutable, serializable data structure
"""
if isinstance(value, (tuple, set, frozenset)):
return [_serializable(v) for v in value]
elif isinstance(value, frozendict):
return {k: _serializable(v) for k, v in value.items()}
elif isinstance(value, datetime):
return value.isoformat()
elif isinstance(value, Fraction):
return float(value)
return value
@app.route('/assets/')
def assets_retrieve():
assets = [asset_key for asset_key in asset_storage]
return jsonify({
"data": assets,
"meta": {
"count": len(assets)
}
})
@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
asset = asset_storage[asset_key]
return jsonify({
"links": {
"self": url_for(asset_retrieve, asset_key=asset_key)
},
"meta": _serializable(asset.metadata)
})
|
from flask import jsonify, url_for
from madam_rest import app, asset_storage
@app.route('/assets/')
def assets_retrieve():
    assets = [asset_key for asset_key in asset_storage]
    return jsonify({
        "data": assets,
        "meta": {
            "count": len(assets)
        }
    })
@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
    asset = asset_storage[asset_key]
    return jsonify({
        "links": {
            "self": url_for(asset_retrieve, asset_key=asset_key)
        },
        "meta": {} # TODO: _mutable(asset.metadata)
    })
Improve serialization of asset metadata.
from datetime import datetime
from flask import jsonify, url_for
from fractions import Fraction
from frozendict import frozendict
from madam_rest import app, asset_storage
def _serializable(value):
"""
Utility function to convert data structures with immutable types to
mutable, serializable data structures.
:param value: data structure with immutable types
:return: mutable, serializable data structure
"""
if isinstance(value, (tuple, set, frozenset)):
return [_serializable(v) for v in value]
elif isinstance(value, frozendict):
return {k: _serializable(v) for k, v in value.items()}
elif isinstance(value, datetime):
return value.isoformat()
elif isinstance(value, Fraction):
return float(value)
return value
@app.route('/assets/')
def assets_retrieve():
assets = [asset_key for asset_key in asset_storage]
return jsonify({
"data": assets,
"meta": {
"count": len(assets)
}
})
@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
asset = asset_storage[asset_key]
return jsonify({
"links": {
"self": url_for(asset_retrieve, asset_key=asset_key)
},
"meta": _serializable(asset.metadata)
})
|
<commit_before>from flask import jsonify, url_for
from madam_rest import app, asset_storage
@app.route('/assets/')
def assets_retrieve():
    assets = [asset_key for asset_key in asset_storage]
    return jsonify({
        "data": assets,
        "meta": {
            "count": len(assets)
        }
    })
@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
    asset = asset_storage[asset_key]
    return jsonify({
        "links": {
            "self": url_for(asset_retrieve, asset_key=asset_key)
        },
        "meta": {} # TODO: _mutable(asset.metadata)
    })
<commit_msg>Improve serialization of asset metadata.<commit_after>from datetime import datetime
from flask import jsonify, url_for
from fractions import Fraction
from frozendict import frozendict
from madam_rest import app, asset_storage
def _serializable(value):
"""
Utility function to convert data structures with immutable types to
mutable, serializable data structures.
:param value: data structure with immutable types
:return: mutable, serializable data structure
"""
if isinstance(value, (tuple, set, frozenset)):
return [_serializable(v) for v in value]
elif isinstance(value, frozendict):
return {k: _serializable(v) for k, v in value.items()}
elif isinstance(value, datetime):
return value.isoformat()
elif isinstance(value, Fraction):
return float(value)
return value
@app.route('/assets/')
def assets_retrieve():
assets = [asset_key for asset_key in asset_storage]
return jsonify({
"data": assets,
"meta": {
"count": len(assets)
}
})
@app.route('/assets/<asset_key>')
def asset_retrieve(asset_key):
asset = asset_storage[asset_key]
return jsonify({
"links": {
"self": url_for(asset_retrieve, asset_key=asset_key)
},
"meta": _serializable(asset.metadata)
})
|
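To make the branches of `_serializable()` above concrete, a hedged usage sketch with invented sample metadata. (Separately, note the recorded views pass the view function itself to Flask's `url_for`, which expects an endpoint name string; that quirk predates this commit.)

```python
# Hedged usage sketch with made-up sample data; not part of the commit.
from datetime import datetime
from fractions import Fraction
from frozendict import frozendict

meta = frozendict({
    'created': datetime(2016, 1, 1),  # datetime -> ISO 8601 string
    'aspect_ratio': Fraction(16, 9),  # Fraction -> float
    'tags': frozenset({'raw'}),       # frozenset -> list
})
# _serializable(meta) would yield plain JSON-friendly types:
# {'created': '2016-01-01T00:00:00', 'aspect_ratio': 1.777..., 'tags': ['raw']}
```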
bae4032cc686fbac906d19456ed744a97b0e1365
|
characters/views.py
|
characters/views.py
|
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
    all_characters = Character.objects.all()
    context = {'all_characters': all_characters}
    return render(request, 'characters/index.html', context)
def view_character(request, character_id):
    character = get_object_or_404(Character, pk=character_id)
    context = {'character': character}
    return render(request, 'characters/view_character.html', context)
def create_character(request):
    form = CharacterForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        race = Race.objects.get(id=1)
        cclass = Class.objects.get(id=1)
        character = Character(
            name=request.POST['name'],
            background=request.POST['background'],
            race=race,
            cclass=cclass
        )
        character.save()
        return redirect('characters:view', character_id=character.id)
    context = {'form': form}
    return render(request, 'characters/create_character.html', context)
|
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
    all_characters = Character.objects.all()
    context = {'all_characters': all_characters}
    return render(request, 'characters/index.html', context)
def view_character(request, character_id):
    character = get_object_or_404(Character, pk=character_id)
    context = {'character': character}
    return render(request, 'characters/view_character.html', context)
def create_character(request):
    form = CharacterForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        character = Character(
            name=request.POST['name'],
            background=request.POST['background'],
            race_id=1,
            cclass_id=1
        )
        character.save()
        return redirect('characters:view', character_id=character.id)
    context = {'form': form}
    return render(request, 'characters/create_character.html', context)
|
Set default race and class without extra database queries
|
Set default race and class without extra database queries
|
Python
|
mit
|
mpirnat/django-tutorial-v2
|
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
    all_characters = Character.objects.all()
    context = {'all_characters': all_characters}
    return render(request, 'characters/index.html', context)
def view_character(request, character_id):
    character = get_object_or_404(Character, pk=character_id)
    context = {'character': character}
    return render(request, 'characters/view_character.html', context)
def create_character(request):
    form = CharacterForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        race = Race.objects.get(id=1)
        cclass = Class.objects.get(id=1)
        character = Character(
            name=request.POST['name'],
            background=request.POST['background'],
            race=race,
            cclass=cclass
        )
        character.save()
        return redirect('characters:view', character_id=character.id)
    context = {'form': form}
    return render(request, 'characters/create_character.html', context)
Set default race and class without extra database queries
|
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
    all_characters = Character.objects.all()
    context = {'all_characters': all_characters}
    return render(request, 'characters/index.html', context)
def view_character(request, character_id):
    character = get_object_or_404(Character, pk=character_id)
    context = {'character': character}
    return render(request, 'characters/view_character.html', context)
def create_character(request):
    form = CharacterForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        character = Character(
            name=request.POST['name'],
            background=request.POST['background'],
            race_id=1,
            cclass_id=1
        )
        character.save()
        return redirect('characters:view', character_id=character.id)
    context = {'form': form}
    return render(request, 'characters/create_character.html', context)
|
<commit_before>from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
    all_characters = Character.objects.all()
    context = {'all_characters': all_characters}
    return render(request, 'characters/index.html', context)
def view_character(request, character_id):
    character = get_object_or_404(Character, pk=character_id)
    context = {'character': character}
    return render(request, 'characters/view_character.html', context)
def create_character(request):
    form = CharacterForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        race = Race.objects.get(id=1)
        cclass = Class.objects.get(id=1)
        character = Character(
            name=request.POST['name'],
            background=request.POST['background'],
            race=race,
            cclass=cclass
        )
        character.save()
        return redirect('characters:view', character_id=character.id)
    context = {'form': form}
    return render(request, 'characters/create_character.html', context)
<commit_msg>Set default race and class without extra database queries<commit_after>
|
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
    all_characters = Character.objects.all()
    context = {'all_characters': all_characters}
    return render(request, 'characters/index.html', context)
def view_character(request, character_id):
    character = get_object_or_404(Character, pk=character_id)
    context = {'character': character}
    return render(request, 'characters/view_character.html', context)
def create_character(request):
    form = CharacterForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        character = Character(
            name=request.POST['name'],
            background=request.POST['background'],
            race_id=1,
            cclass_id=1
        )
        character.save()
        return redirect('characters:view', character_id=character.id)
    context = {'form': form}
    return render(request, 'characters/create_character.html', context)
|
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
all_characters = Character.objects.all()
context = {'all_characters': all_characters}
return render(request, 'characters/index.html', context)
def view_character(request, character_id):
character = get_object_or_404(Character, pk=character_id)
context = {'character': character}
return render(request, 'characters/view_character.html', context)
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
race = Race.objects.get(id=1)
cclass = Class.objects.get(id=1)
character = Character(
name=request.POST['name'],
background=request.POST['background'],
race=race,
cclass=cclass
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
Set default race and class without extra database queries
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
all_characters = Character.objects.all()
context = {'all_characters': all_characters}
return render(request, 'characters/index.html', context)
def view_character(request, character_id):
character = get_object_or_404(Character, pk=character_id)
context = {'character': character}
return render(request, 'characters/view_character.html', context)
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
character = Character(
name=request.POST['name'],
background=request.POST['background'],
race_id=1,
cclass_id=1
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
|
<commit_before>from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
all_characters = Character.objects.all()
context = {'all_characters': all_characters}
return render(request, 'characters/index.html', context)
def view_character(request, character_id):
character = get_object_or_404(Character, pk=character_id)
context = {'character': character}
return render(request, 'characters/view_character.html', context)
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
race = Race.objects.get(id=1)
cclass = Class.objects.get(id=1)
character = Character(
name=request.POST['name'],
background=request.POST['background'],
race=race,
cclass=cclass
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
<commit_msg>Set default race and class without extra database queries<commit_after>
from django.shortcuts import get_object_or_404, redirect, render
from characters.forms import CharacterForm
from characters.models import Character, Class, Race
def index(request):
all_characters = Character.objects.all()
context = {'all_characters': all_characters}
return render(request, 'characters/index.html', context)
def view_character(request, character_id):
character = get_object_or_404(Character, pk=character_id)
context = {'character': character}
return render(request, 'characters/view_character.html', context)
def create_character(request):
form = CharacterForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
character = Character(
name=request.POST['name'],
background=request.POST['background'],
race_id=1,
cclass_id=1
)
character.save()
return redirect('characters:view', character_id=character.id)
context = {'form': form}
return render(request, 'characters/create_character.html', context)
|
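The record above leans on a Django ORM detail worth spelling out: every ForeignKey field exposes an implicit <field>_id column attribute, and assigning to it skips the SELECT that Model.objects.get() would otherwise issue. A minimal sketch of the idea, assuming a simplified Race model and a modern Django release (the on_delete argument is required from Django 2.0 on and does not appear in the record's models):

from django.db import models

class Race(models.Model):
    name = models.CharField(max_length=50)

class Character(models.Model):
    name = models.CharField(max_length=50)
    race = models.ForeignKey(Race, on_delete=models.CASCADE)

# Writing the raw foreign-key column avoids fetching the Race row first;
# only the INSERT touches the database when save() runs.
character = Character(name='Frodo', race_id=1)
character.save()

The trade-off is that a dangling id surfaces only as an IntegrityError at save() time rather than a DoesNotExist at lookup time.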
1549aa29892ec30e0926b7a92d1c0d8857edc8d5
|
shub/utils.py
|
shub/utils.py
|
import imp, os, netrc, ConfigParser
SCRAPY_CFG_FILE = os.path.expanduser("~/.scrapy.cfg")
NETRC_FILE = os.path.expanduser('~/.netrc')
def missing_modules(*modules):
"""Receives a list of module names and returns those which are missing"""
missing = []
for module_name in modules:
try:
imp.find_module(module_name)
except ImportError:
missing.append(module_name)
return missing
def find_api_key():
"""Finds and returns the Scrapy Cloud APIKEY"""
key = os.getenv("SH_APIKEY")
if not key:
key = get_key_netrc()
return key
def get_key_netrc():
"""Gets the key from the netrc file"""
try:
info = netrc.netrc()
except IOError:
return
try:
key, account, password = info.authenticators("scrapinghub.com")
except TypeError:
return
if key:
return key
|
import imp, os, netrc, ConfigParser
SCRAPY_CFG_FILE = os.path.expanduser("~/.scrapy.cfg")
NETRC_FILE = os.path.expanduser('~/.netrc')
def missing_modules(*modules):
"""Receives a list of module names and returns those which are missing"""
missing = []
for module_name in modules:
try:
imp.find_module(module_name)
except ImportError:
missing.append(module_name)
return missing
def find_api_key():
"""Finds and returns the Scrapy Cloud APIKEY"""
key = os.getenv("SHUB_APIKEY")
if not key:
key = get_key_netrc()
return key
def get_key_netrc():
"""Gets the key from the netrc file"""
try:
info = netrc.netrc()
except IOError:
return
try:
key, account, password = info.authenticators("scrapinghub.com")
except TypeError:
return
if key:
return key
|
Fix prefix in environment variable that contains SH's key
|
Fix prefix in environment variable that contains SH's key
|
Python
|
bsd-3-clause
|
scrapinghub/shub
|
import imp, os, netrc, ConfigParser
SCRAPY_CFG_FILE = os.path.expanduser("~/.scrapy.cfg")
NETRC_FILE = os.path.expanduser('~/.netrc')
def missing_modules(*modules):
"""Receives a list of module names and returns those which are missing"""
missing = []
for module_name in modules:
try:
imp.find_module(module_name)
except ImportError:
missing.append(module_name)
return missing
def find_api_key():
"""Finds and returns the Scrapy Cloud APIKEY"""
key = os.getenv("SH_APIKEY")
if not key:
key = get_key_netrc()
return key
def get_key_netrc():
"""Gets the key from the netrc file"""
try:
info = netrc.netrc()
except IOError:
return
try:
key, account, password = info.authenticators("scrapinghub.com")
except TypeError:
return
if key:
return key
Fix prefix in environment variable that contains SH's key
|
import imp, os, netrc, ConfigParser
SCRAPY_CFG_FILE = os.path.expanduser("~/.scrapy.cfg")
NETRC_FILE = os.path.expanduser('~/.netrc')
def missing_modules(*modules):
"""Receives a list of module names and returns those which are missing"""
missing = []
for module_name in modules:
try:
imp.find_module(module_name)
except ImportError:
missing.append(module_name)
return missing
def find_api_key():
"""Finds and returns the Scrapy Cloud APIKEY"""
key = os.getenv("SHUB_APIKEY")
if not key:
key = get_key_netrc()
return key
def get_key_netrc():
"""Gets the key from the netrc file"""
try:
info = netrc.netrc()
except IOError:
return
try:
key, account, password = info.authenticators("scrapinghub.com")
except TypeError:
return
if key:
return key
|
<commit_before>import imp, os, netrc, ConfigParser
SCRAPY_CFG_FILE = os.path.expanduser("~/.scrapy.cfg")
NETRC_FILE = os.path.expanduser('~/.netrc')
def missing_modules(*modules):
"""Receives a list of module names and returns those which are missing"""
missing = []
for module_name in modules:
try:
imp.find_module(module_name)
except ImportError:
missing.append(module_name)
return missing
def find_api_key():
"""Finds and returns the Scrapy Cloud APIKEY"""
key = os.getenv("SH_APIKEY")
if not key:
key = get_key_netrc()
return key
def get_key_netrc():
"""Gets the key from the netrc file"""
try:
info = netrc.netrc()
except IOError:
return
try:
key, account, password = info.authenticators("scrapinghub.com")
except TypeError:
return
if key:
return key
<commit_msg>Fix prefix in environment variable that contains SH's key<commit_after>
|
import imp, os, netrc, ConfigParser
SCRAPY_CFG_FILE = os.path.expanduser("~/.scrapy.cfg")
NETRC_FILE = os.path.expanduser('~/.netrc')
def missing_modules(*modules):
"""Receives a list of module names and returns those which are missing"""
missing = []
for module_name in modules:
try:
imp.find_module(module_name)
except ImportError:
missing.append(module_name)
return missing
def find_api_key():
"""Finds and returns the Scrapy Cloud APIKEY"""
key = os.getenv("SHUB_APIKEY")
if not key:
key = get_key_netrc()
return key
def get_key_netrc():
"""Gets the key from the netrc file"""
try:
info = netrc.netrc()
except IOError:
return
try:
key, account, password = info.authenticators("scrapinghub.com")
except TypeError:
return
if key:
return key
|
import imp, os, netrc, ConfigParser
SCRAPY_CFG_FILE = os.path.expanduser("~/.scrapy.cfg")
NETRC_FILE = os.path.expanduser('~/.netrc')
def missing_modules(*modules):
"""Receives a list of module names and returns those which are missing"""
missing = []
for module_name in modules:
try:
imp.find_module(module_name)
except ImportError:
missing.append(module_name)
return missing
def find_api_key():
"""Finds and returns the Scrapy Cloud APIKEY"""
key = os.getenv("SH_APIKEY")
if not key:
key = get_key_netrc()
return key
def get_key_netrc():
"""Gets the key from the netrc file"""
try:
info = netrc.netrc()
except IOError:
return
try:
key, account, password = info.authenticators("scrapinghub.com")
except TypeError:
return
if key:
return key
Fix prefix in environment variable that contains SH's key
import imp, os, netrc, ConfigParser
SCRAPY_CFG_FILE = os.path.expanduser("~/.scrapy.cfg")
NETRC_FILE = os.path.expanduser('~/.netrc')
def missing_modules(*modules):
"""Receives a list of module names and returns those which are missing"""
missing = []
for module_name in modules:
try:
imp.find_module(module_name)
except ImportError:
missing.append(module_name)
return missing
def find_api_key():
"""Finds and returns the Scrapy Cloud APIKEY"""
key = os.getenv("SHUB_APIKEY")
if not key:
key = get_key_netrc()
return key
def get_key_netrc():
"""Gets the key from the netrc file"""
try:
info = netrc.netrc()
except IOError:
return
try:
key, account, password = info.authenticators("scrapinghub.com")
except TypeError:
return
if key:
return key
|
<commit_before>import imp, os, netrc, ConfigParser
SCRAPY_CFG_FILE = os.path.expanduser("~/.scrapy.cfg")
NETRC_FILE = os.path.expanduser('~/.netrc')
def missing_modules(*modules):
"""Receives a list of module names and returns those which are missing"""
missing = []
for module_name in modules:
try:
imp.find_module(module_name)
except ImportError:
missing.append(module_name)
return missing
def find_api_key():
"""Finds and returns the Scrapy Cloud APIKEY"""
key = os.getenv("SH_APIKEY")
if not key:
key = get_key_netrc()
return key
def get_key_netrc():
"""Gets the key from the netrc file"""
try:
info = netrc.netrc()
except IOError:
return
try:
key, account, password = info.authenticators("scrapinghub.com")
except TypeError:
return
if key:
return key
<commit_msg>Fix prefix in environment variable that contains SH's key<commit_after>
import imp, os, netrc, ConfigParser
SCRAPY_CFG_FILE = os.path.expanduser("~/.scrapy.cfg")
NETRC_FILE = os.path.expanduser('~/.netrc')
def missing_modules(*modules):
"""Receives a list of module names and returns those which are missing"""
missing = []
for module_name in modules:
try:
imp.find_module(module_name)
except ImportError:
missing.append(module_name)
return missing
def find_api_key():
"""Finds and returns the Scrapy Cloud APIKEY"""
key = os.getenv("SHUB_APIKEY")
if not key:
key = get_key_netrc()
return key
def get_key_netrc():
"""Gets the key from the netrc file"""
try:
info = netrc.netrc()
except IOError:
return
try:
key, account, password = info.authenticators("scrapinghub.com")
except TypeError:
return
if key:
return key
|
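For reference, the lookup pattern in this record — environment variable first, ~/.netrc as a fallback — can be collapsed into one self-contained helper. A hedged sketch on Python 3 (the record's code is Python 2; the variable and host names below simply mirror the record):

import os
import netrc

def find_api_key(env_var='SHUB_APIKEY', host='scrapinghub.com'):
    """Return the API key from the environment, else from ~/.netrc."""
    key = os.getenv(env_var)
    if key:
        return key
    try:
        auth = netrc.netrc().authenticators(host)  # (login, account, password)
    except (OSError, netrc.NetrcParseError):
        return None
    return auth[0] if auth else None

netrc.netrc() raises OSError when ~/.netrc is missing and authenticators() returns None for an unknown host, so both failure paths fall through to None instead of raising, matching the record's behaviour.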
6a4dd66035956037d660271f18592af04edab818
|
read_images.py
|
read_images.py
|
import time
import cv2
import os
import glob
# path = 'by_class'
path = 'test'
t1 = time.time()
file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]'))
t2 = time.time()
print('Time to list files: ', t2-t1)
file_classes=[ele.split('/')[1] for ele in file_names]
t3 = time.time()
print('Time to list labels: ', t3-t2)
# for i in range(len(file_names)):
# print(file_names[i], file_classes[i])
images = [cv2.imread(file) for file in file_names]
t4 = time.time()
print('Time to read images: ',t4-t3)
|
import time
import os
import glob
import tensorflow as tf
# path = 'by_class'
path = 'test'
t1 = time.time()
file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]'))
filename_queue = tf.train.string_input_producer(file_names)
t2 = time.time()
print('Time to list files: ', t2-t1)
file_classes=[int(ele.split('/')[1], base=16) for ele in file_names]
try:
file_labels = [str(chr(i)) for i in file_classes] #python 3
except:
file_labels = [str(unichr(i)) for i in file_classes] #python 2.7
t3 = time.time()
print('Time to list labels: ', t3-t2)
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
my_img = tf.image.decode_png(value) # use png or jpg decoder based on your files.
init_op = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init_op)
# Start populating the filename queue.
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord, sess=sess)
for i in range(len(file_classes)): #length of your filename list
image = my_img.eval(session = sess) #here is your image Tensor :)
coord.request_stop()
coord.join(threads)
t4 = time.time()
print('Time to read images: ',t4-t3)
|
Read all images using tf itself
|
Read all images using tf itself
|
Python
|
apache-2.0
|
iitmcvg/OCR-Handwritten-Text,iitmcvg/OCR-Handwritten-Text,iitmcvg/OCR-Handwritten-Text
|
import time
import cv2
import os
import glob
# path = 'by_class'
path = 'test'
t1 = time.time()
file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]'))
t2 = time.time()
print('Time to list files: ', t2-t1)
file_classes=[ele.split('/')[1] for ele in file_names]
t3 = time.time()
print('Time to list labels: ', t3-t2)
# for i in range(len(file_names)):
# print(file_names[i], file_classes[i])
images = [cv2.imread(file) for file in file_names]
t4 = time.time()
print('Time to read images: ',t4-t3)
Read all images using tf itself
|
import time
import os
import glob
import tensorflow as tf
# path = 'by_class'
path = 'test'
t1 = time.time()
file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]'))
filename_queue = tf.train.string_input_producer(file_names)
t2 = time.time()
print('Time to list files: ', t2-t1)
file_classes=[int(ele.split('/')[1], base=16) for ele in file_names]
try:
file_labels = [str(chr(i)) for i in file_classes] #python 3
except:
file_labels = [str(unichr(i)) for i in file_classes] #python 2.7
t3 = time.time()
print('Time to list labels: ', t3-t2)
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
my_img = tf.image.decode_png(value) # use png or jpg decoder based on your files.
init_op = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init_op)
# Start populating the filename queue.
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord, sess=sess)
for i in range(len(file_classes)): #length of your filename list
image = my_img.eval(session = sess) #here is your image Tensor :)
coord.request_stop()
coord.join(threads)
t4 = time.time()
print('Time to read images: ',t4-t3)
|
<commit_before>import time
import cv2
import os
import glob
# path = 'by_class'
path = 'test'
t1 = time.time()
file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]'))
t2 = time.time()
print('Time to list files: ', t2-t1)
file_classes=[ele.split('/')[1] for ele in file_names]
t3 = time.time()
print('Time to list labels: ', t3-t2)
# for i in range(len(file_names)):
# print(file_names[i], file_classes[i])
images = [cv2.imread(file) for file in file_names]
t4 = time.time()
print('Time to read images: ',t4-t3)
<commit_msg>Read all images using tf itself<commit_after>
|
import time
import os
import glob
import tensorflow as tf
# path = 'by_class'
path = 'test'
t1 = time.time()
file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]'))
filename_queue = tf.train.string_input_producer(file_names)
t2 = time.time()
print('Time to list files: ', t2-t1)
file_classes=[int(ele.split('/')[1], base=16) for ele in file_names]
try:
file_labels = [str(chr(i)) for i in file_classes] #python 3
except:
file_labels = [str(unichr(i)) for i in file_classes] #python 2.7
t3 = time.time()
print('Time to list labels: ', t3-t2)
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
my_img = tf.image.decode_png(value) # use png or jpg decoder based on your files.
init_op = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init_op)
# Start populating the filename queue.
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord, sess=sess)
for i in range(len(file_classes)): #length of your filename list
image = my_img.eval(session = sess) #here is your image Tensor :)
coord.request_stop()
coord.join(threads)
t4 = time.time()
print('Time to read images: ',t4-t3)
|
import time
import cv2
import os
import glob
# path = 'by_class'
path = 'test'
t1 = time.time()
file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]'))
t2 = time.time()
print('Time to list files: ', t2-t1)
file_classes=[ele.split('/')[1] for ele in file_names]
t3 = time.time()
print('Time to list labels: ', t3-t2)
# for i in range(len(file_names)):
# print(file_names[i], file_classes[i])
images = [cv2.imread(file) for file in file_names]
t4 = time.time()
print('Time to read images: ',t4-t3)
Read all images using tf itself
import time
import os
import glob
import tensorflow as tf
# path = 'by_class'
path = 'test'
t1 = time.time()
file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]'))
filename_queue = tf.train.string_input_producer(file_names)
t2 = time.time()
print('Time to list files: ', t2-t1)
file_classes=[int(ele.split('/')[1], base=16) for ele in file_names]
try:
file_labels = [str(chr(i)) for i in file_classes] #python 3
except:
file_labels = [str(unichr(i)) for i in file_classes] #python 2.7
t3 = time.time()
print('Time to list labels: ', t3-t2)
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
my_img = tf.image.decode_png(value) # use png or jpg decoder based on your files.
init_op = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init_op)
# Start populating the filename queue.
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord, sess=sess)
for i in range(len(file_classes)): #length of your filename list
image = my_img.eval(session = sess) #here is your image Tensor :)
coord.request_stop()
coord.join(threads)
t4 = time.time()
print('Time to read images: ',t4-t3)
|
<commit_before>import time
import cv2
import os
import glob
# path = 'by_class'
path = 'test'
t1 = time.time()
file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]'))
t2 = time.time()
print('Time to list files: ', t2-t1)
file_classes=[ele.split('/')[1] for ele in file_names]
t3 = time.time()
print('Time to list labels: ', t3-t2)
# for i in range(len(file_names)):
# print(file_names[i], file_classes[i])
images = [cv2.imread(file) for file in file_names]
t4 = time.time()
print('Time to read images: ',t4-t3)
<commit_msg>Read all images using tf itself<commit_after>
import time
import os
import glob
import tensorflow as tf
# path = 'by_class'
path = 'test'
t1 = time.time()
file_names=glob.glob(os.path.join(path,'*','train_*','*.[pP][nN][gG]'))
filename_queue = tf.train.string_input_producer(file_names)
t2 = time.time()
print('Time to list files: ', t2-t1)
file_classes=[int(ele.split('/')[1], base=16) for ele in file_names]
try:
file_labels = [str(chr(i)) for i in file_classes] #python 3
except:
file_labels = [str(unichr(i)) for i in file_classes] #python 2.7
t3 = time.time()
print('Time to list labels: ', t3-t2)
reader = tf.WholeFileReader()
key, value = reader.read(filename_queue)
my_img = tf.image.decode_png(value) # use png or jpg decoder based on your files.
init_op = tf.initialize_all_variables()
sess = tf.Session()
sess.run(init_op)
# Start populating the filename queue.
coord = tf.train.Coordinator()
threads = tf.train.start_queue_runners(coord=coord, sess=sess)
for i in range(len(file_classes)): #length of your filename list
image = my_img.eval(session = sess) #here is your image Tensor :)
coord.request_stop()
coord.join(threads)
t4 = time.time()
print('Time to read images: ',t4-t3)
|
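A caveat for anyone replaying this record today: tf.train.string_input_producer, tf.WholeFileReader, and tf.initialize_all_variables are TensorFlow 1.x APIs that were removed in TensorFlow 2. A rough tf.data equivalent of the same pipeline, assuming TF 2.x and the record's directory layout, might look like:

import glob
import os
import tensorflow as tf

path = 'test'
file_names = glob.glob(os.path.join(path, '*', 'train_*', '*.[pP][nN][gG]'))

def load_image(filename):
    # read_file + decode_png replace the WholeFileReader/queue-runner pair
    return tf.io.decode_png(tf.io.read_file(filename))

dataset = tf.data.Dataset.from_tensor_slices(file_names).map(load_image)
for image in dataset:  # each element is a uint8 image tensor
    pass

tf.data also removes the explicit Coordinator and thread bookkeeping the record needs, since iterating the dataset drives the input pipeline directly.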
169d32333aa3152dcec893f2ce58c46d614aaea4
|
models/employees.py
|
models/employees.py
|
import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form={}):
if not form:
raise ValueError('Form is supplied with wrong data.')
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
|
import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form=None):
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
|
Fix Dangerous default value {} as argument, pylint.
|
Fix Dangerous default value {} as argument, pylint.
|
Python
|
mit
|
openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
|
import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form={}):
if not form:
raise ValueError('Form is supplied with wrong data.')
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
Fix Dangerous default value {} as argument, pylint.
|
import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form=None):
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
|
<commit_before>import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form={}):
if not form:
raise ValueError('Form is supplied with wrong data.')
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
<commit_msg>Fix Dangerous default value {} as argument, pylint.<commit_after>
|
import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form=None):
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
|
import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form={}):
if not form:
raise ValueError('Form is supplied with wrong data.')
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
Fix Dangerous default value {} as argument, pylint.
import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form=None):
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
|
<commit_before>import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form={}):
if not form:
raise ValueError('Form is supplied with wrong data.')
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
<commit_msg>Fix Dangerous default value {} as argument, pylint.<commit_after>
import datetime
from openedoo.core.libs.tools import hashing_werkzeug
from openedoo_project import db
from .users import User
class Employee(User):
@classmethod
def is_exist(self, username):
employee = self.query.get(username=username).first()
return employee
@classmethod
def get_public_list(self):
employees = self.query.with_entities(self.username,
self.fullname,
self.nip)
return employees
@classmethod
def check_records(self):
employees = self.query.limit(1).all()
return employees
@classmethod
def add(self, form=None):
data = {
'username': form['username'],
'fullname': form['fullname'],
'password': hashing_werkzeug(form['password']),
'nip': form['nip'],
'created': datetime.datetime.now()
}
employeeData = self(data)
db.session.add(employeeData)
return db.session.commit()
|
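The pylint warning this commit silences (W0102, dangerous-default-value) exists because a mutable default is created once at function definition time and then shared by every call. A small illustration of the pitfall and of the form=None idiom, independent of the record's models:

def add_item(item, bucket=None):
    # With bucket=[] in the signature, the same list object would be
    # reused across calls; creating a fresh one per call avoids that.
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

print(add_item(1))  # [1]
print(add_item(2))  # [2], not [1, 2]

Note that the fix in this record also drops the explicit emptiness check, so calling add() without a form now fails with a TypeError on form['username'] rather than the old ValueError.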
30e261d895fd4260e3788398b8dd46a5e889815e
|
sandbox/urls.py
|
sandbox/urls.py
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from apps.app import application
from accounts.dashboard.app import application as accounts_app
from accounts.views import AccountBalanceView
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
url(r'^giftcard-balance/', AccountBalanceView.as_view(),
name="account-balance"),
(r'^dashboard/accounts/', include(accounts_app.urls)),
(r'', include(application.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')))
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from apps.app import application
from accounts.dashboard.app import application as accounts_app
from accounts.views import AccountBalanceView
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^giftcard-balance/', AccountBalanceView.as_view(),
name="account-balance"),
(r'^dashboard/accounts/', include(accounts_app.urls)),
(r'', include(application.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')))
|
Include i18n URLs in sandbox
|
Include i18n URLs in sandbox
|
Python
|
bsd-3-clause
|
Mariana-Tek/django-oscar-accounts,Jannes123/django-oscar-accounts,michaelkuty/django-oscar-accounts,Mariana-Tek/django-oscar-accounts,django-oscar/django-oscar-accounts,machtfit/django-oscar-accounts,machtfit/django-oscar-accounts,Jannes123/django-oscar-accounts,michaelkuty/django-oscar-accounts,django-oscar/django-oscar-accounts
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from apps.app import application
from accounts.dashboard.app import application as accounts_app
from accounts.views import AccountBalanceView
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
url(r'^giftcard-balance/', AccountBalanceView.as_view(),
name="account-balance"),
(r'^dashboard/accounts/', include(accounts_app.urls)),
(r'', include(application.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')))
Include i18n URLs in sandbox
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from apps.app import application
from accounts.dashboard.app import application as accounts_app
from accounts.views import AccountBalanceView
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^giftcard-balance/', AccountBalanceView.as_view(),
name="account-balance"),
(r'^dashboard/accounts/', include(accounts_app.urls)),
(r'', include(application.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')))
|
<commit_before>from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from apps.app import application
from accounts.dashboard.app import application as accounts_app
from accounts.views import AccountBalanceView
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
url(r'^giftcard-balance/', AccountBalanceView.as_view(),
name="account-balance"),
(r'^dashboard/accounts/', include(accounts_app.urls)),
(r'', include(application.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')))
<commit_msg>Include i18n URLs in sandbox<commit_after>
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from apps.app import application
from accounts.dashboard.app import application as accounts_app
from accounts.views import AccountBalanceView
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^giftcard-balance/', AccountBalanceView.as_view(),
name="account-balance"),
(r'^dashboard/accounts/', include(accounts_app.urls)),
(r'', include(application.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')))
|
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from apps.app import application
from accounts.dashboard.app import application as accounts_app
from accounts.views import AccountBalanceView
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
url(r'^giftcard-balance/', AccountBalanceView.as_view(),
name="account-balance"),
(r'^dashboard/accounts/', include(accounts_app.urls)),
(r'', include(application.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')))
Include i18n URLs in sandbox
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from apps.app import application
from accounts.dashboard.app import application as accounts_app
from accounts.views import AccountBalanceView
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^giftcard-balance/', AccountBalanceView.as_view(),
name="account-balance"),
(r'^dashboard/accounts/', include(accounts_app.urls)),
(r'', include(application.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')))
|
<commit_before>from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from apps.app import application
from accounts.dashboard.app import application as accounts_app
from accounts.views import AccountBalanceView
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
url(r'^giftcard-balance/', AccountBalanceView.as_view(),
name="account-balance"),
(r'^dashboard/accounts/', include(accounts_app.urls)),
(r'', include(application.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')))
<commit_msg>Include i18n URLs in sandbox<commit_after>
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from apps.app import application
from accounts.dashboard.app import application as accounts_app
from accounts.views import AccountBalanceView
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^giftcard-balance/', AccountBalanceView.as_view(),
name="account-balance"),
(r'^dashboard/accounts/', include(accounts_app.urls)),
(r'', include(application.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')))
|
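The one-line change here wires in Django's bundled i18n urlconf, whose main payload is the set_language redirect view that language-switcher forms post to. On a current Django release the same include would be written with path(), since patterns() was removed in Django 1.10; a minimal sketch:

from django.urls import include, path

urlpatterns = [
    path('i18n/', include('django.conf.urls.i18n')),  # exposes set_language at i18n/setlang/
]

A template form can then POST a language code to {% url 'set_language' %}, and Django stores the choice and redirects back to the referring page.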
0189eef29ce0054ef8747da317c0717bde196c17
|
py2app/__init__.py
|
py2app/__init__.py
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
__version__ = "0.4.4"
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
import pkg_resources
__version__ = pkg_resources.require('py2app')[0].version
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
Set py2app.__version__ using pkg_resources, that ensures that the version stays in sync with the value in setup.py
|
Set py2app.__version__ using pkg_resources, that ensures that the version
stays in sync with the value in setup.py
|
Python
|
mit
|
metachris/py2app,metachris/py2app,metachris/py2app,metachris/py2app
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
__version__ = "0.4.4"
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
Set py2app.__version__ using pkg_resources, that ensures that the version
stays in sync with the value in setup.py
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
import pkg_resources
__version__ = pkg_resources.require('py2app')[0].version
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
<commit_before>"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
__version__ = "0.4.4"
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
<commit_msg>Set py2app.__version__ using pkg_resources, that ensures that the version
stays in sync with the value in setup.py<commit_after>
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
import pkg_resources
__version__ = pkg_resources.require('py2app')[0].version
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
__version__ = "0.4.4"
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
Set py2app.__version__ using pkg_resources, that ensures that the version
stays in sync with the value in setup.py
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
import pkg_resources
__version__ = pkg_resources.require('py2app')[0].version
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
<commit_before>"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
__version__ = "0.4.4"
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
<commit_msg>Set py2app.__version__ using pkg_resources, that ensures that the version
stays in sync with the value in setup.py<commit_after>
"""
builds Mac OS X application bundles from Python scripts
New keywords for distutils' setup function specify what to build:
app
list of scripts to convert into gui app bundles
py2app options, to be specified in the options keyword to the setup function:
optimize - string or int (0, 1, or 2)
includes - list of module names to include
packages - list of packages to include with subpackages
ignores - list of modules to ignore if they are not found
excludes - list of module names to exclude
dylib_excludes - list of dylibs and/or frameworks to exclude
resources - list of additional files and folders to include
plist - Info.plist template file, dict, or plistlib.Plist
dist_dir - directory where to build the final files
Items in the macosx list can also be
dictionaries to further customize the build process. The following
keys in the dictionary are recognized, most are optional:
script (MACOSX) - list of python scripts (required)
dest_base - directory and basename for the executable
if a directory is contained, must be the same for all targets
"""
import pkg_resources
__version__ = pkg_resources.require('py2app')[0].version
# This makes the py2app command work in the distutils.core.setup() case
import setuptools
|
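The same keep-the-version-in-one-place trick appears in many packages; pkg_resources works but is slow at import time and is deprecated in recent setuptools. A hedged modern sketch using the standard library instead, assuming Python 3.8+ and an installed py2app distribution:

from importlib.metadata import version

__version__ = version('py2app')  # reads the installed distribution's metadata

Like the pkg_resources form, this only resolves once the package is installed (for development, pip install -e .), because the version comes from distribution metadata rather than from the source tree.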
84f913d928d28bc193d21eb223e7815f69c53a22
|
plugins/jira.py
|
plugins/jira.py
|
from neb.engine import Plugin, Command
import requests
class JiraPlugin(Plugin):
def get_commands(self):
"""Return human readable commands with descriptions.
Returns:
list[Command]
"""
return [
Command("jira", self.jira, "Perform commands on Matrix JIRA.", [
"server-info - Retrieve server information."
]),
]
def jira(self, event, args):
action = args[1]
actions = {
"server-info": self._server_info
}
return actions[action](event, args)
def _server_info(self, event, args):
return self._body("Boo")
def sync(self, matrix, initial_sync):
pass
|
from neb.engine import Plugin, Command, KeyValueStore
import json
import requests
class JiraPlugin(Plugin):
def __init__(self, config="jira.json"):
self.store = KeyValueStore(config)
if not self.store.has("url"):
url = raw_input("JIRA URL: ").strip()
self.store.set("url", url)
def get_commands(self):
"""Return human readable commands with descriptions.
Returns:
list[Command]
"""
return [
Command("jira", self.jira, "Perform commands on a JIRA platform.", [
"server-info - Retrieve server information."
]),
]
def jira(self, event, args):
if len(args) == 1:
return self._body("Perform commands on a JIRA platform.")
action = args[1]
actions = {
"server-info": self._server_info
}
return actions[action](event, args)
def _server_info(self, event, args):
url = self._url("/rest/api/2/serverInfo")
response = json.loads(requests.get(url).text)
info = "%s : version %s : build %s" % (response["serverTitle"],
response["version"], response["buildNumber"])
return self._body(info)
def sync(self, matrix, initial_sync):
pass
def _url(self, path):
return self.store.get("url") + path
|
Make the plugin request server info from JIRA.
|
Make the plugin request server info from JIRA.
|
Python
|
apache-2.0
|
Kegsay/Matrix-NEB,matrix-org/Matrix-NEB,illicitonion/Matrix-NEB
|
from neb.engine import Plugin, Command
import requests
class JiraPlugin(Plugin):
def get_commands(self):
"""Return human readable commands with descriptions.
Returns:
list[Command]
"""
return [
Command("jira", self.jira, "Perform commands on Matrix JIRA.", [
"server-info - Retrieve server information."
]),
]
def jira(self, event, args):
action = args[1]
actions = {
"server-info": self._server_info
}
return actions[action](event, args)
def _server_info(self, event, args):
return self._body("Boo")
def sync(self, matrix, initial_sync):
pass
Make the plugin request server info from JIRA.
|
from neb.engine import Plugin, Command, KeyValueStore
import json
import requests
class JiraPlugin(Plugin):
def __init__(self, config="jira.json"):
self.store = KeyValueStore(config)
if not self.store.has("url"):
url = raw_input("JIRA URL: ").strip()
self.store.set("url", url)
def get_commands(self):
"""Return human readable commands with descriptions.
Returns:
list[Command]
"""
return [
Command("jira", self.jira, "Perform commands on a JIRA platform.", [
"server-info - Retrieve server information."
]),
]
def jira(self, event, args):
if len(args) == 1:
return self._body("Perform commands on a JIRA platform.")
action = args[1]
actions = {
"server-info": self._server_info
}
return actions[action](event, args)
def _server_info(self, event, args):
url = self._url("/rest/api/2/serverInfo")
response = json.loads(requests.get(url).text)
info = "%s : version %s : build %s" % (response["serverTitle"],
response["version"], response["buildNumber"])
return self._body(info)
def sync(self, matrix, initial_sync):
pass
def _url(self, path):
return self.store.get("url") + path
|
<commit_before>from neb.engine import Plugin, Command
import requests
class JiraPlugin(Plugin):
def get_commands(self):
"""Return human readable commands with descriptions.
Returns:
list[Command]
"""
return [
Command("jira", self.jira, "Perform commands on Matrix JIRA.", [
"server-info - Retrieve server information."
]),
]
def jira(self, event, args):
action = args[1]
actions = {
"server-info": self._server_info
}
return actions[action](event, args)
def _server_info(self, event, args):
return self._body("Boo")
def sync(self, matrix, initial_sync):
pass
<commit_msg>Make the plugin request server info from JIRA.<commit_after>
|
from neb.engine import Plugin, Command, KeyValueStore
import json
import requests
class JiraPlugin(Plugin):
def __init__(self, config="jira.json"):
self.store = KeyValueStore(config)
if not self.store.has("url"):
url = raw_input("JIRA URL: ").strip()
self.store.set("url", url)
def get_commands(self):
"""Return human readable commands with descriptions.
Returns:
list[Command]
"""
return [
Command("jira", self.jira, "Perform commands on a JIRA platform.", [
"server-info - Retrieve server information."
]),
]
def jira(self, event, args):
if len(args) == 1:
return self._body("Perform commands on a JIRA platform.")
action = args[1]
actions = {
"server-info": self._server_info
}
return actions[action](event, args)
def _server_info(self, event, args):
url = self._url("/rest/api/2/serverInfo")
response = json.loads(requests.get(url).text)
info = "%s : version %s : build %s" % (response["serverTitle"],
response["version"], response["buildNumber"])
return self._body(info)
def sync(self, matrix, initial_sync):
pass
def _url(self, path):
return self.store.get("url") + path
|
from neb.engine import Plugin, Command
import requests
class JiraPlugin(Plugin):
def get_commands(self):
"""Return human readable commands with descriptions.
Returns:
list[Command]
"""
return [
Command("jira", self.jira, "Perform commands on Matrix JIRA.", [
"server-info - Retrieve server information."
]),
]
def jira(self, event, args):
action = args[1]
actions = {
"server-info": self._server_info
}
return actions[action](event, args)
def _server_info(self, event, args):
return self._body("Boo")
def sync(self, matrix, initial_sync):
pass
Make the plugin request server info from JIRA.from neb.engine import Plugin, Command, KeyValueStore
import json
import requests
class JiraPlugin(Plugin):
def __init__(self, config="jira.json"):
self.store = KeyValueStore(config)
if not self.store.has("url"):
url = raw_input("JIRA URL: ").strip()
self.store.set("url", url)
def get_commands(self):
"""Return human readable commands with descriptions.
Returns:
list[Command]
"""
return [
Command("jira", self.jira, "Perform commands on a JIRA platform.", [
"server-info - Retrieve server information."
]),
]
def jira(self, event, args):
if len(args) == 1:
return self._body("Perform commands on a JIRA platform.")
action = args[1]
actions = {
"server-info": self._server_info
}
return actions[action](event, args)
def _server_info(self, event, args):
url = self._url("/rest/api/2/serverInfo")
response = json.loads(requests.get(url).text)
info = "%s : version %s : build %s" % (response["serverTitle"],
response["version"], response["buildNumber"])
return self._body(info)
def sync(self, matrix, initial_sync):
pass
def _url(self, path):
return self.store.get("url") + path
|
<commit_before>from neb.engine import Plugin, Command
import requests
class JiraPlugin(Plugin):
def get_commands(self):
"""Return human readable commands with descriptions.
Returns:
list[Command]
"""
return [
Command("jira", self.jira, "Perform commands on Matrix JIRA.", [
"server-info - Retrieve server information."
]),
]
def jira(self, event, args):
action = args[1]
actions = {
"server-info": self._server_info
}
return actions[action](event, args)
def _server_info(self, event, args):
return self._body("Boo")
def sync(self, matrix, initial_sync):
pass
<commit_msg>Make the plugin request server info from JIRA.<commit_after>from neb.engine import Plugin, Command, KeyValueStore
import json
import requests
class JiraPlugin(Plugin):
def __init__(self, config="jira.json"):
self.store = KeyValueStore(config)
if not self.store.has("url"):
url = raw_input("JIRA URL: ").strip()
self.store.set("url", url)
def get_commands(self):
"""Return human readable commands with descriptions.
Returns:
list[Command]
"""
return [
Command("jira", self.jira, "Perform commands on a JIRA platform.", [
"server-info - Retrieve server information."
]),
]
def jira(self, event, args):
if len(args) == 1:
return self._body("Perform commands on a JIRA platform.")
action = args[1]
actions = {
"server-info": self._server_info
}
return actions[action](event, args)
def _server_info(self, event, args):
url = self._url("/rest/api/2/serverInfo")
response = json.loads(requests.get(url).text)
info = "%s : version %s : build %s" % (response["serverTitle"],
response["version"], response["buildNumber"])
return self._body(info)
def sync(self, matrix, initial_sync):
pass
def _url(self, path):
return self.store.get("url") + path
|
2171559a3cfcaeda2abe8a343c118769edad245f
|
src/keybar/conf/development.py
|
src/keybar/conf/development.py
|
import os
from keybar.conf.base import *
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
# TODO: Make this a bit more automated.
KEYBAR_DOMAIN = 'keybar.local'
KEYBAR_HOST = 'keybar.local:8443'
KEYBAR_VERIFY_CLIENT_CERTIFICATE = False
|
import os
from keybar.conf.base import *
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
# TODO: Make this a bit more automated.
KEYBAR_DOMAIN = 'keybar.local'
KEYBAR_HOST = 'keybar.local:8443'
KEYBAR_VERIFY_CLIENT_CERTIFICATE = False
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
Update logging config to actually log exceptions to console.
|
Update logging config to actually log exceptions to console.
|
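To see why this configuration surfaces exceptions, here is a minimal sketch applying the same dictConfig outside Django; the emitted message is illustrative only:

import logging
import logging.config

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {'console': {'class': 'logging.StreamHandler'}},
    'loggers': {
        'django.request': {
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': True,
        },
    },
}

logging.config.dictConfig(LOGGING)
# Django logs unhandled request exceptions to this logger; emitting a record
# manually shows that it now reaches the console handler.
logging.getLogger('django.request').error('simulated unhandled exception')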
Python
|
bsd-3-clause
|
keybar/keybar
|
import os
from keybar.conf.base import *
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
# TODO: Make this a bit more automated.
KEYBAR_DOMAIN = 'keybar.local'
KEYBAR_HOST = 'keybar.local:8443'
KEYBAR_VERIFY_CLIENT_CERTIFICATE = False
Update logging config to actually log exceptions to console.
|
import os
from keybar.conf.base import *
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
# TODO: Make this a bit more automated.
KEYBAR_DOMAIN = 'keybar.local'
KEYBAR_HOST = 'keybar.local:8443'
KEYBAR_VERIFY_CLIENT_CERTIFICATE = False
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
<commit_before>import os
from keybar.conf.base import *
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
# TODO: Make this a bit more automated.
KEYBAR_DOMAIN = 'keybar.local'
KEYBAR_HOST = 'keybar.local:8443'
KEYBAR_VERIFY_CLIENT_CERTIFICATE = False
<commit_msg>Update logging config to actually log exceptions to console.<commit_after>
|
import os
from keybar.conf.base import *
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
# TODO: Make this a bit more automated.
KEYBAR_DOMAIN = 'keybar.local'
KEYBAR_HOST = 'keybar.local:8443'
KEYBAR_VERIFY_CLIENT_CERTIFICATE = False
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
import os
from keybar.conf.base import *
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
# TODO: Make this a bit more automated.
KEYBAR_DOMAIN = 'keybar.local'
KEYBAR_HOST = 'keybar.local:8443'
KEYBAR_VERIFY_CLIENT_CERTIFICATE = False
Update logging config to actually log exceptions to console.import os
from keybar.conf.base import *
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
# TODO: Make this a bit more automated.
KEYBAR_DOMAIN = 'keybar.local'
KEYBAR_HOST = 'keybar.local:8443'
KEYBAR_VERIFY_CLIENT_CERTIFICATE = False
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
<commit_before>import os
from keybar.conf.base import *
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
# TODO: Make this a bit more automated.
KEYBAR_DOMAIN = 'keybar.local'
KEYBAR_HOST = 'keybar.local:8443'
KEYBAR_VERIFY_CLIENT_CERTIFICATE = False
<commit_msg>Update logging config to actually log exceptions to console.<commit_after>import os
from keybar.conf.base import *
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
# TODO: Make this a bit more automated.
KEYBAR_DOMAIN = 'keybar.local'
KEYBAR_HOST = 'keybar.local:8443'
KEYBAR_VERIFY_CLIENT_CERTIFICATE = False
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'console': {
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['console'],
'level': 'DEBUG',
'propagate': True,
},
},
}
|
cb036a9725304f138b6da92bc0aae7f497cbafa5
|
server.py
|
server.py
|
import argparse
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
import argparse
import json
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
NUM_RECORDS = 5
RECORD_SIZE = 64
app = Flask(__name__)
@app.route('/db_size')
def get_db_size():
return json.dumps({'num_records': NUM_RECORDS, 'record_size': RECORD_SIZE})
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
Add call to get the database size, as JSON
|
Add call to get the database size, as JSON
|
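A client-side sketch of the new endpoint, assuming the Blindstore server runs locally on Flask's default port 5000:

import json

import requests

resp = requests.get("http://localhost:5000/db_size")
size = json.loads(resp.text)
# With the constants in this commit, this prints: 5 64
print(size["num_records"], size["record_size"])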
Python
|
mit
|
blindstore/blindstore-old-scarab
|
import argparse
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
Add call to get the database size, as JSON
|
import argparse
import json
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
NUM_RECORDS = 5
RECORD_SIZE = 64
app = Flask(__name__)
@app.route('/db_size')
def get_db_size():
return json.dumps({'num_records': NUM_RECORDS, 'record_size': RECORD_SIZE})
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
<commit_before>import argparse
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
<commit_msg>Add call to get the database size, as JSON<commit_after>
|
import argparse
import json
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
NUM_RECORDS = 5
RECORD_SIZE = 64
app = Flask(__name__)
@app.route('/db_size')
def get_db_size():
return json.dumps({'num_records': NUM_RECORDS, 'record_size': RECORD_SIZE})
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
import argparse
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
Add call to get the database size, as JSONimport argparse
import json
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
NUM_RECORDS = 5
RECORD_SIZE = 64
app = Flask(__name__)
@app.route('/db_size')
def get_db_size():
return json.dumps({'num_records': NUM_RECORDS, 'record_size': RECORD_SIZE})
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
<commit_before>import argparse
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
app = Flask(__name__)
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
<commit_msg>Add call to get the database size, as JSON<commit_after>import argparse
import json
from flask import Flask, request
parser = argparse.ArgumentParser(description="Start a Blindstore server.")
parser.add_argument('-d', '--debug', action='store_true',
help="enable Flask debug mode. DO NOT use in production.")
args = parser.parse_args()
NUM_RECORDS = 5
RECORD_SIZE = 64
app = Flask(__name__)
@app.route('/db_size')
def get_db_size():
return json.dumps({'num_records': NUM_RECORDS, 'record_size': RECORD_SIZE})
@app.route('/retrieve', methods=['POST'])
def get():
public_key = request.form['PUBLIC_KEY']
enc_index = request.form['ENC_INDEX']
return "/retrieve index '{index}' with key '{key}'".format(index=enc_index, key=public_key)
@app.route('/set', methods=['POST'])
def put():
enc_index = request.form['ENC_INDEX']
enc_data = request.form['ENC_DATA']
return "/set '{index}' to '{data}'".format(data=enc_data, index=enc_index)
if __name__ == '__main__':
app.run(debug=args.debug)
|
4fd67e4e17f0813056493a635e8256a017d894e2
|
src/tempel/models.py
|
src/tempel/models.py
|
from django.db import models
from django.conf import settings
from tempel import utils
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(auto_now=True, auto_now_add=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
|
from django.db import models
from django.conf import settings
from tempel import utils
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(auto_now=True, auto_now_add=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
def __unicode__(self):
return '<Entry: id=%s lang=%s>' % (self.id, self.language)
|
Add text representation for Entry object
|
Add text representation for Entry object
|
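A small sketch of what the new representation yields; under the Python 2 / Django 1.x convention this codebase follows, __unicode__ is what the admin and shell display. The id and language values below are made up, and the stub stands in for the real model so no database is needed:

class EntryStub(object):
    # Stand-in carrying the same __unicode__ as the Entry model.
    def __init__(self, id, language):
        self.id = id
        self.language = language

    def __unicode__(self):
        return '<Entry: id=%s lang=%s>' % (self.id, self.language)

print(EntryStub(42, 'python').__unicode__())  # <Entry: id=42 lang=python>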
Python
|
agpl-3.0
|
fajran/tempel
|
from django.db import models
from django.conf import settings
from tempel import utils
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(auto_now=True, auto_now_add=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
Add text representation for Entry object
|
from django.db import models
from django.conf import settings
from tempel import utils
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(auto_now=True, auto_now_add=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
def __unicode__(self):
return '<Entry: id=%s lang=%s>' % (self.id, self.language)
|
<commit_before>from django.db import models
from django.conf import settings
from tempel import utils
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(auto_now=True, auto_now_add=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
<commit_msg>Add text representation for Entry object<commit_after>
|
from django.db import models
from django.conf import settings
from tempel import utils
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(auto_now=True, auto_now_add=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
def __unicode__(self):
return '<Entry: id=%s lang=%s>' % (self.id, self.language)
|
from django.db import models
from django.conf import settings
from tempel import utils
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(auto_now=True, auto_now_add=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
Add text representation for Entry objectfrom django.db import models
from django.conf import settings
from tempel import utils
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(auto_now=True, auto_now_add=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
def __unicode__(self):
return '<Entry: id=%s lang=%s>' % (self.id, self.language)
|
<commit_before>from django.db import models
from django.conf import settings
from tempel import utils
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(auto_now=True, auto_now_add=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
<commit_msg>Add text representation for Entry object<commit_after>from django.db import models
from django.conf import settings
from tempel import utils
class Entry(models.Model):
content = models.TextField()
language = models.CharField(max_length=20,
choices=utils.get_languages())
created = models.DateTimeField(auto_now=True, auto_now_add=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
verbose_name_plural = "entries"
def get_language(self):
return utils.get_language(self.language)
def get_mimetype(self):
return utils.get_mimetype(self.language)
def get_filename(self):
return '%s.%s' % (self.id, self.get_extension())
def get_extension(self):
return utils.get_extension(self.language)
def __unicode__(self):
return '<Entry: id=%s lang=%s>' % (self.id, self.language)
|
d17a2308ff903b459b6c9310fd6d42eb0e051544
|
statsSend/teamCity/teamCityStatisticsSender.py
|
statsSend/teamCity/teamCityStatisticsSender.py
|
#!/usr/bin/env python3
from dateutil import parser
from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject
class TeamCityStatisticsSender:
def __init__(self, settings, reporter):
self.page_size = int(settings['page_size'])
connection = TeamCityConnection(settings['user'], settings['password'])
url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
self.reporter = reporter
async def send(self):
if ("report_categories" in dir(self.reporter)):
categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()]
self.reporter.report_categories(categories)
async for build_configuration in self.project.retrieve_build_configurations():
async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
job = build_run.toJob()
self.reporter.report_job(job)
|
#!/usr/bin/env python3

from dateutil import parser
from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject
import sys


def eprint(*args, **kwargs):
    # Send error reports to stderr so they do not pollute normal output.
    print(*args, file=sys.stderr, **kwargs)
class TeamCityStatisticsSender:
def __init__(self, settings, reporter):
self.page_size = int(settings['page_size'])
connection = TeamCityConnection(settings['user'], settings['password'])
url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
self.reporter = reporter
async def send(self):
if ("report_categories" in dir(self.reporter)):
try:
categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()]
self.reporter.report_categories(categories)
except Exception as err:
eprint("Error sending categories" + err)
try:
async for build_configuration in self.project.retrieve_build_configurations():
async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
try:
job = build_run.toJob()
self.reporter.report_job(job)
except Exception as err:
eprint("Error reporting job" + err)
except Exception as err:
eprint("Error reporting jobs" + err)
|
Add error handling in statistics sender
|
Add error handling in statistics sender
|
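For reference, a standalone sketch of the stderr helper and the error formatting this change relies on; the RuntimeError is only a stand-in to trigger the handler:

import sys


def eprint(*args, **kwargs):
    # Route error reports to stderr, away from normal output.
    print(*args, file=sys.stderr, **kwargs)


try:
    raise RuntimeError("simulated failure")
except Exception as err:
    # Format with %s rather than concatenating an Exception to a str.
    eprint("Error reporting job: %s" % err)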
Python
|
mit
|
luigiberrettini/build-deploy-stats
|
#!/usr/bin/env python3
from dateutil import parser
from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject
class TeamCityStatisticsSender:
def __init__(self, settings, reporter):
self.page_size = int(settings['page_size'])
connection = TeamCityConnection(settings['user'], settings['password'])
url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
self.reporter = reporter
async def send(self):
if ("report_categories" in dir(self.reporter)):
categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()]
self.reporter.report_categories(categories)
async for build_configuration in self.project.retrieve_build_configurations():
async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
job = build_run.toJob()
self.reporter.report_job(job)Add error handling in statistics sender
|
#!/usr/bin/env python3
from dateutil import parser
from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject
import sys


def eprint(*args, **kwargs):
    # Send error reports to stderr so they do not pollute normal output.
    print(*args, file=sys.stderr, **kwargs)
class TeamCityStatisticsSender:
def __init__(self, settings, reporter):
self.page_size = int(settings['page_size'])
connection = TeamCityConnection(settings['user'], settings['password'])
url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
self.reporter = reporter
async def send(self):
if ("report_categories" in dir(self.reporter)):
try:
categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()]
self.reporter.report_categories(categories)
except Exception as err:
eprint("Error sending categories" + err)
try:
async for build_configuration in self.project.retrieve_build_configurations():
async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
try:
job = build_run.toJob()
self.reporter.report_job(job)
except Exception as err:
eprint("Error reporting job" + err)
except Exception as err:
eprint("Error reporting jobs" + err)
|
<commit_before>#!/usr/bin/env python3
from dateutil import parser
from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject
class TeamCityStatisticsSender:
def __init__(self, settings, reporter):
self.page_size = int(settings['page_size'])
connection = TeamCityConnection(settings['user'], settings['password'])
url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
self.reporter = reporter
async def send(self):
if ("report_categories" in dir(self.reporter)):
categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()]
self.reporter.report_categories(categories)
async for build_configuration in self.project.retrieve_build_configurations():
async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
job = build_run.toJob()
self.reporter.report_job(job)<commit_msg>Add error handling in statistics sender<commit_after>
|
#!/usr/bin/env python3
from dateutil import parser
from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject
import sys


def eprint(*args, **kwargs):
    # Send error reports to stderr so they do not pollute normal output.
    print(*args, file=sys.stderr, **kwargs)
class TeamCityStatisticsSender:
def __init__(self, settings, reporter):
self.page_size = int(settings['page_size'])
connection = TeamCityConnection(settings['user'], settings['password'])
url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
self.reporter = reporter
async def send(self):
if ("report_categories" in dir(self.reporter)):
try:
categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()]
self.reporter.report_categories(categories)
except Exception as err:
eprint("Error sending categories" + err)
try:
async for build_configuration in self.project.retrieve_build_configurations():
async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
try:
job = build_run.toJob()
self.reporter.report_job(job)
except Exception as err:
eprint("Error reporting job" + err)
except Exception as err:
eprint("Error reporting jobs" + err)
|
#!/usr/bin/env python3
from dateutil import parser
from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject
class TeamCityStatisticsSender:
def __init__(self, settings, reporter):
self.page_size = int(settings['page_size'])
connection = TeamCityConnection(settings['user'], settings['password'])
url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
self.reporter = reporter
async def send(self):
if ("report_categories" in dir(self.reporter)):
categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()]
self.reporter.report_categories(categories)
async for build_configuration in self.project.retrieve_build_configurations():
async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
job = build_run.toJob()
self.reporter.report_job(job)Add error handling in statistics sender#!/usr/bin/env python3
from dateutil import parser
from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject
import sys


def eprint(*args, **kwargs):
    # Send error reports to stderr so they do not pollute normal output.
    print(*args, file=sys.stderr, **kwargs)
class TeamCityStatisticsSender:
def __init__(self, settings, reporter):
self.page_size = int(settings['page_size'])
connection = TeamCityConnection(settings['user'], settings['password'])
url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
self.reporter = reporter
async def send(self):
if ("report_categories" in dir(self.reporter)):
try:
categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()]
self.reporter.report_categories(categories)
except Exception as err:
eprint("Error sending categories" + err)
try:
async for build_configuration in self.project.retrieve_build_configurations():
async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
try:
job = build_run.toJob()
self.reporter.report_job(job)
except Exception as err:
eprint("Error reporting job" + err)
except Exception as err:
eprint("Error reporting jobs" + err)
|
<commit_before>#!/usr/bin/env python3
from dateutil import parser
from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject
class TeamCityStatisticsSender:
def __init__(self, settings, reporter):
self.page_size = int(settings['page_size'])
connection = TeamCityConnection(settings['user'], settings['password'])
url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
self.reporter = reporter
async def send(self):
if ("report_categories" in dir(self.reporter)):
categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()]
self.reporter.report_categories(categories)
async for build_configuration in self.project.retrieve_build_configurations():
async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
job = build_run.toJob()
self.reporter.report_job(job)<commit_msg>Add error handling in statistics sender<commit_after>#!/usr/bin/env python3
from dateutil import parser
from statsSend.teamCity.teamCityConnection import TeamCityConnection
from statsSend.teamCity.teamCityUrlBuilder import TeamCityUrlBuilder
from statsSend.teamCity.teamCityProject import TeamCityProject
import sys


def eprint(*args, **kwargs):
    # Send error reports to stderr so they do not pollute normal output.
    print(*args, file=sys.stderr, **kwargs)
class TeamCityStatisticsSender:
def __init__(self, settings, reporter):
self.page_size = int(settings['page_size'])
connection = TeamCityConnection(settings['user'], settings['password'])
url_builder = TeamCityUrlBuilder(settings['server_url'], settings['api_url_prefix'])
self.project = TeamCityProject(settings['project_id'], connection, url_builder, self.page_size)
self.since_timestamp = parser.parse(settings['since_timestamp']).strftime('%Y%m%dT%H%M%S%z')
self.reporter = reporter
async def send(self):
if ("report_categories" in dir(self.reporter)):
try:
categories = [build_configuration.toCategory() async for build_configuration in self.project.retrieve_build_configurations()]
self.reporter.report_categories(categories)
except Exception as err:
eprint("Error sending categories" + err)
try:
async for build_configuration in self.project.retrieve_build_configurations():
async for build_run in build_configuration.retrieve_build_runs_since_timestamp(self.since_timestamp):
try:
job = build_run.toJob()
self.reporter.report_job(job)
except Exception as err:
eprint("Error reporting job" + err)
except Exception as err:
eprint("Error reporting jobs" + err)
|
00b134df7281c39595f9efcc1c1da047d1d10277
|
src/encoded/authorization.py
|
src/encoded/authorization.py
|
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab_uuids = user.properties.get('lab_uuids', [])
principals.extend('lab:' + lab_uuid for lab_uuid in lab_uuids)
if CHERRY_LAB_UUID in lab_uuids:
principals.append('group:admin')
return principals
|
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab = user.properties.get('lab')
if lab:
principals.append('lab:%s' % lab)
submits_for = user.properties.get('submits_for', [])
principals.extend('lab:%s' % lab_uuid for lab_uuid in submits_for)
principals.extend('submits_for:%s' % lab_uuid for lab_uuid in submits_for)
if CHERRY_LAB_UUID in submits_for:
principals.append('group:admin')
return principals
|
Update group finder to new schemas
|
Update group finder to new schemas
|
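A standalone sketch of the new principal mapping, with a plain dict and made-up UUIDs standing in for the real user object:

# Hypothetical user properties mirroring the new schema fields.
props = {'lab': 'lab-uuid-1', 'submits_for': ['lab-uuid-1', 'lab-uuid-2']}

principals = ['userid:user-uuid-1']
lab = props.get('lab')
if lab:
    principals.append('lab:%s' % lab)
submits_for = props.get('submits_for', [])
principals.extend('lab:%s' % uuid for uuid in submits_for)
principals.extend('submits_for:%s' % uuid for uuid in submits_for)
# A lab principal may appear twice; downstream permission checks are
# membership tests, so duplicates are harmless.
print(principals)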
Python
|
mit
|
kidaa/encoded,philiptzou/clincoded,4dn-dcic/fourfront,hms-dbmi/fourfront,4dn-dcic/fourfront,philiptzou/clincoded,philiptzou/clincoded,hms-dbmi/fourfront,kidaa/encoded,ENCODE-DCC/snovault,ClinGen/clincoded,ENCODE-DCC/snovault,kidaa/encoded,T2DREAM/t2dream-portal,philiptzou/clincoded,ENCODE-DCC/snovault,ENCODE-DCC/encoded,hms-dbmi/fourfront,T2DREAM/t2dream-portal,kidaa/encoded,4dn-dcic/fourfront,4dn-dcic/fourfront,ENCODE-DCC/snovault,hms-dbmi/fourfront,ENCODE-DCC/encoded,ClinGen/clincoded,ENCODE-DCC/snovault,T2DREAM/t2dream-portal,hms-dbmi/fourfront,kidaa/encoded,ClinGen/clincoded,ENCODE-DCC/encoded,ClinGen/clincoded,ClinGen/clincoded,T2DREAM/t2dream-portal,ENCODE-DCC/encoded,philiptzou/clincoded
|
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab_uuids = user.properties.get('lab_uuids', [])
principals.extend('lab:' + lab_uuid for lab_uuid in lab_uuids)
if CHERRY_LAB_UUID in lab_uuids:
principals.append('group:admin')
return principals
Update group finder to new schemas
|
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab = user.properties.get('lab')
if lab:
principals.append('lab:%s' % lab)
submits_for = user.properties.get('submits_for', [])
principals.extend('lab:%s' % lab_uuid for lab_uuid in submits_for)
principals.extend('submits_for:%s' % lab_uuid for lab_uuid in submits_for)
if CHERRY_LAB_UUID in submits_for:
principals.append('group:admin')
return principals
|
<commit_before>from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab_uuids = user.properties.get('lab_uuids', [])
principals.extend('lab:' + lab_uuid for lab_uuid in lab_uuids)
if CHERRY_LAB_UUID in lab_uuids:
principals.append('group:admin')
return principals
<commit_msg>Update group finder to new schemas<commit_after>
|
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab = user.properties.get('lab')
if lab:
principals.append('lab:%s' % lab)
submits_for = user.properties.get('submits_for', [])
principals.extend('lab:%s' % lab_uuid for lab_uuid in submits_for)
principals.extend('submits_for:%s' % lab_uuid for lab_uuid in submits_for)
if CHERRY_LAB_UUID in submits_for:
principals.append('group:admin')
return principals
|
from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab_uuids = user.properties.get('lab_uuids', [])
principals.extend('lab:' + lab_uuid for lab_uuid in lab_uuids)
if CHERRY_LAB_UUID in lab_uuids:
principals.append('group:admin')
return principals
Update group finder to new schemasfrom .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab = user.properties.get('lab')
if lab:
principals.append('lab:%s' % lab)
submits_for = user.properties.get('submits_for', [])
principals.extend('lab:%s' % lab_uuid for lab_uuid in submits_for)
principals.extend('submits_for:%s' % lab_uuid for lab_uuid in submits_for)
if CHERRY_LAB_UUID in submits_for:
principals.append('group:admin')
return principals
|
<commit_before>from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab_uuids = user.properties.get('lab_uuids', [])
principals.extend('lab:' + lab_uuid for lab_uuid in lab_uuids)
if CHERRY_LAB_UUID in lab_uuids:
principals.append('group:admin')
return principals
<commit_msg>Update group finder to new schemas<commit_after>from .contentbase import LOCATION_ROOT
CHERRY_LAB_UUID = 'cfb789b8-46f3-4d59-a2b3-adc39e7df93a'
def groupfinder(login, request):
if ':' not in login:
return None
namespace, localname = login.split(':', 1)
user = None
# We may get called before the context is found and the root set
root = request.registry[LOCATION_ROOT]
if namespace == 'remoteuser':
if localname in ['TEST', 'IMPORT']:
return ['group:admin']
if namespace in ('mailto', 'remoteuser'):
users = root.by_item_type['user']
try:
user = users[localname]
except KeyError:
return None
elif namespace == 'accesskey':
access_keys = root.by_item_type['access_key']
try:
access_key = access_keys[localname]
except KeyError:
return None
userid = access_key.properties['user_uuid']
user = root.by_item_type['user'][userid]
if user is None:
return None
principals = ['userid:%s' % user.uuid]
lab = user.properties.get('lab')
if lab:
principals.append('lab:%s' % lab)
submits_for = user.properties.get('submits_for', [])
principals.extend('lab:%s' % lab_uuid for lab_uuid in submits_for)
principals.extend('submits_for:%s' % lab_uuid for lab_uuid in submits_for)
if CHERRY_LAB_UUID in submits_for:
principals.append('group:admin')
return principals
|
5cd9ac8d3079fca16828b25b40fed8358286708b
|
geotrek/outdoor/models.py
|
geotrek/outdoor/models.py
|
from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin
class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
TimeStampedModelMixin, NoDeleteMixin):
geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
name = models.CharField(verbose_name=_("Name"), max_length=128)
description = models.TextField(verbose_name=_("Description"), blank=True)
eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)
class Meta:
verbose_name = _("Site")
verbose_name_plural = _("Sites")
ordering = ('name', )
def __str__(self):
return self.name
|
from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin
class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
TimeStampedModelMixin, NoDeleteMixin):
geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
name = models.CharField(verbose_name=_("Name"), max_length=128)
description = models.TextField(verbose_name=_("Description"), blank=True)
eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)
class Meta:
verbose_name = _("Site")
verbose_name_plural = _("Sites")
ordering = ('name', )
def __str__(self):
return self.name
@property
def name_display(self):
return '<a data-pk="{pk}" href="{url}" title="{name}">{name}</a>'.format(
pk=self.pk,
url=self.get_detail_url(),
name=self.name
)
|
Add links to site detail in site list
|
Add links to site detail in site list
|
Python
|
bsd-2-clause
|
makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
|
from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin
class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
TimeStampedModelMixin, NoDeleteMixin):
geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
name = models.CharField(verbose_name=_("Name"), max_length=128)
description = models.TextField(verbose_name=_("Description"), blank=True)
eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)
class Meta:
verbose_name = _("Site")
verbose_name_plural = _("Sites")
ordering = ('name', )
def __str__(self):
return self.name
Add links to site detail in site list
|
from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin
class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
TimeStampedModelMixin, NoDeleteMixin):
geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
name = models.CharField(verbose_name=_("Name"), max_length=128)
description = models.TextField(verbose_name=_("Description"), blank=True)
eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)
class Meta:
verbose_name = _("Site")
verbose_name_plural = _("Sites")
ordering = ('name', )
def __str__(self):
return self.name
@property
def name_display(self):
return '<a data-pk="{pk}" href="{url}" title="{name}">{name}</a>'.format(
pk=self.pk,
url=self.get_detail_url(),
name=self.name
)
|
<commit_before>from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin
class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
TimeStampedModelMixin, NoDeleteMixin):
geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
name = models.CharField(verbose_name=_("Name"), max_length=128)
description = models.TextField(verbose_name=_("Description"), blank=True)
eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)
class Meta:
verbose_name = _("Site")
verbose_name_plural = _("Sites")
ordering = ('name', )
def __str__(self):
return self.name
<commit_msg>Add links to site detail in site list<commit_after>
|
from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin
class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
TimeStampedModelMixin, NoDeleteMixin):
geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
name = models.CharField(verbose_name=_("Name"), max_length=128)
description = models.TextField(verbose_name=_("Description"), blank=True)
eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)
class Meta:
verbose_name = _("Site")
verbose_name_plural = _("Sites")
ordering = ('name', )
def __str__(self):
return self.name
@property
def name_display(self):
return '<a data-pk="{pk}" href="{url}" title="{name}">{name}</a>'.format(
pk=self.pk,
url=self.get_detail_url(),
name=self.name
)
|
from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin
class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
TimeStampedModelMixin, NoDeleteMixin):
geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
name = models.CharField(verbose_name=_("Name"), max_length=128)
description = models.TextField(verbose_name=_("Description"), blank=True)
eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)
class Meta:
verbose_name = _("Site")
verbose_name_plural = _("Sites")
ordering = ('name', )
def __str__(self):
return self.name
Add links to site detail in site listfrom django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin
class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
TimeStampedModelMixin, NoDeleteMixin):
geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
name = models.CharField(verbose_name=_("Name"), max_length=128)
description = models.TextField(verbose_name=_("Description"), blank=True)
eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)
class Meta:
verbose_name = _("Site")
verbose_name_plural = _("Sites")
ordering = ('name', )
def __str__(self):
return self.name
@property
def name_display(self):
return '<a data-pk="{pk}" href="{url}" title="{name}">{name}</a>'.format(
pk=self.pk,
url=self.get_detail_url(),
name=self.name
)
|
<commit_before>from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin
class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
TimeStampedModelMixin, NoDeleteMixin):
geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
name = models.CharField(verbose_name=_("Name"), max_length=128)
description = models.TextField(verbose_name=_("Description"), blank=True)
eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)
class Meta:
verbose_name = _("Site")
verbose_name_plural = _("Sites")
ordering = ('name', )
def __str__(self):
return self.name
<commit_msg>Add links to site detail in site list<commit_after>from django.conf import settings
from django.contrib.gis.db import models
from django.utils.translation import gettext_lazy as _
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import NoDeleteMixin, TimeStampedModelMixin, AddPropertyMixin
from mapentity.models import MapEntityMixin
class Site(AddPropertyMixin, MapEntityMixin, StructureRelated,
TimeStampedModelMixin, NoDeleteMixin):
geom = models.GeometryField(verbose_name=_("Location"), srid=settings.SRID)
name = models.CharField(verbose_name=_("Name"), max_length=128)
description = models.TextField(verbose_name=_("Description"), blank=True)
eid = models.CharField(verbose_name=_("External id"), max_length=1024, blank=True, null=True)
class Meta:
verbose_name = _("Site")
verbose_name_plural = _("Sites")
ordering = ('name', )
def __str__(self):
return self.name
@property
def name_display(self):
return '<a data-pk="{pk}" href="{url}" title="{name}">{name}</a>'.format(
pk=self.pk,
url=self.get_detail_url(),
name=self.name
)
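The name_display property above produces the markup that, presumably, the MapEntity list view picks up for the name column. A minimal stub (the pk value and the get_detail_url return are invented for illustration) shows the rendered anchor:

class SiteStub:
    # Fakes just enough of the Django model to exercise name_display.
    pk = 7
    name = 'Sample site'

    def get_detail_url(self):
        # Assumption: the real method returns the detail-view path.
        return '/site/%s/' % self.pk

    @property
    def name_display(self):
        return '<a data-pk="{pk}" href="{url}" title="{name}">{name}</a>'.format(
            pk=self.pk, url=self.get_detail_url(), name=self.name)

print(SiteStub().name_display)
# <a data-pk="7" href="/site/7/" title="Sample site">Sample site</a>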
|
c8e11b602eb7525789ed1c5f4ea686f45b44f304
|
src/diamond/handler/httpHandler.py
|
src/diamond/handler/httpHandler.py
|
#!/usr/bin/python2.7
from Handler import Handler
import urllib
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
#!/usr/bin/env python
# coding=utf-8
from Handler import Handler
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
Remove unneeded import, fix python path and add coding
|
Remove unneeded import, fix python path and add coding
|
Python
|
mit
|
signalfx/Diamond,ramjothikumar/Diamond,jriguera/Diamond,anandbhoraskar/Diamond,jriguera/Diamond,Precis/Diamond,jriguera/Diamond,socialwareinc/Diamond,saucelabs/Diamond,acquia/Diamond,dcsquared13/Diamond,stuartbfox/Diamond,hvnsweeting/Diamond,h00dy/Diamond,cannium/Diamond,dcsquared13/Diamond,Ssawa/Diamond,bmhatfield/Diamond,TAKEALOT/Diamond,tellapart/Diamond,krbaker/Diamond,MediaMath/Diamond,tuenti/Diamond,timchenxiaoyu/Diamond,tellapart/Diamond,EzyInsights/Diamond,russss/Diamond,tusharmakkar08/Diamond,Ormod/Diamond,krbaker/Diamond,Netuitive/netuitive-diamond,hvnsweeting/Diamond,sebbrandt87/Diamond,jriguera/Diamond,jumping/Diamond,zoidbergwill/Diamond,rtoma/Diamond,timchenxiaoyu/Diamond,tellapart/Diamond,MediaMath/Diamond,Precis/Diamond,gg7/diamond,mfriedenhagen/Diamond,russss/Diamond,TinLe/Diamond,python-diamond/Diamond,jumping/Diamond,thardie/Diamond,eMerzh/Diamond-1,tusharmakkar08/Diamond,Ormod/Diamond,krbaker/Diamond,MediaMath/Diamond,thardie/Diamond,eMerzh/Diamond-1,hamelg/Diamond,rtoma/Diamond,Netuitive/netuitive-diamond,mzupan/Diamond,codepython/Diamond,h00dy/Diamond,works-mobile/Diamond,janisz/Diamond-1,krbaker/Diamond,MichaelDoyle/Diamond,thardie/Diamond,eMerzh/Diamond-1,hamelg/Diamond,Diamond/Diamond,rtoma/Diamond,Netuitive/netuitive-diamond,mzupan/Diamond,anandbhoraskar/Diamond,Clever/Diamond,Ormod/Diamond,ramjothikumar/Diamond,hamelg/Diamond,Basis/Diamond,russss/Diamond,joel-airspring/Diamond,Slach/Diamond,metamx/Diamond,CYBERBUGJR/Diamond,datafiniti/Diamond,thardie/Diamond,tuenti/Diamond,ramjothikumar/Diamond,Ormod/Diamond,TinLe/Diamond,tusharmakkar08/Diamond,EzyInsights/Diamond,timchenxiaoyu/Diamond,ceph/Diamond,mfriedenhagen/Diamond,skbkontur/Diamond,socialwareinc/Diamond,eMerzh/Diamond-1,Netuitive/Diamond,python-diamond/Diamond,Slach/Diamond
|
#!/usr/bin/python2.7
from Handler import Handler
import urllib
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
Remove unneeded import, fix python path and add coding
|
#!/usr/bin/env python
# coding=utf-8
from Handler import Handler
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
<commit_before>#!/usr/bin/python2.7
from Handler import Handler
import urllib
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
<commit_msg>Remove unneeded import, fix python path and add coding<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
from Handler import Handler
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
#!/usr/bin/python2.7
from Handler import Handler
import urllib
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
Remove unneeded import, fix python path and add coding#!/usr/bin/env python
# coding=utf-8
from Handler import Handler
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
|
<commit_before>#!/usr/bin/python2.7
from Handler import Handler
import urllib
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
<commit_msg>Remove unneeded import, fix python path and add coding<commit_after>#!/usr/bin/env python
# coding=utf-8
from Handler import Handler
import urllib2
class HttpPostHandler(Handler):
    # Initialize Handler with url and batch size
def __init__(self, config=None):
Handler.__init__(self, config)
self.metrics = []
self.batch_size = int(self.config.get('batch', 100))
self.url = self.config.get('url')
# Join batched metrics and push to url mentioned in config
def process(self, metric):
self.metrics.append(str(metric))
if len(self.metrics) >= self.batch_size:
req = urllib2.Request(self.url, "\n".join(self.metrics))
urllib2.urlopen(req)
self.metrics = []
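The handler's batching logic is easy to exercise offline. In this sketch the send stub replaces the real urllib2 POST, and the constructor arguments mirror the batch/url config keys above:

class BatchSender(object):
    def __init__(self, url, batch=100):
        self.url = url
        self.batch_size = int(batch)
        self.metrics = []

    def send(self, payload):
        # Stand-in for urllib2.urlopen(urllib2.Request(self.url, payload)).
        print('POST %s <- %d bytes' % (self.url, len(payload)))

    def process(self, metric):
        self.metrics.append(str(metric))
        if len(self.metrics) >= self.batch_size:
            self.send('\n'.join(self.metrics))
            self.metrics = []

s = BatchSender('http://example.com/collect', batch=2)
s.process('servers.a.cpu 0.5 1300000000')
s.process('servers.a.mem 0.8 1300000000')  # second metric triggers the flush

As written above, metrics still buffered when the process exits are never sent; the handler has no flush-on-shutdown step.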
|
f076acb05840c361890fbb5ef0c8b43d0de7e2ed
|
opsdroid/message.py
|
opsdroid/message.py
|
""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
|
""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
self.regex = None
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
|
Make regex a None property
|
Make regex a None property
|
Python
|
apache-2.0
|
FabioRosado/opsdroid,jacobtomlinson/opsdroid,opsdroid/opsdroid
|
""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
Make regex a None property
|
""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
self.regex = None
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
|
<commit_before>""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
<commit_msg>Make regex a None property<commit_after>
|
""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
self.regex = None
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
|
""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
Make regex a None property""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
self.regex = None
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
|
<commit_before>""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
<commit_msg>Make regex a None property<commit_after>""" Class to encapsulate a message """
import logging
class Message:
""" A message object """
def __init__(self, text, user, room, connector):
""" Create object with minimum properties """
self.text = text
self.user = user
self.room = room
self.connector = connector
self.regex = None
def respond(self, text):
""" Respond to this message using the connector it was created by """
self.text = text
self.connector.respond(self)
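A quick round-trip sketch of the class; EchoConnector is a made-up stand-in for a real opsdroid connector, and Message is re-declared inline so the snippet runs by itself:

class Message:
    # Trimmed copy of the class above.
    def __init__(self, text, user, room, connector):
        self.text = text
        self.user = user
        self.room = room
        self.connector = connector
        self.regex = None  # the new attribute, filled in later by a matcher

    def respond(self, text):
        self.text = text
        self.connector.respond(self)

class EchoConnector:
    def respond(self, message):
        print('[%s] %s' % (message.room, message.text))

msg = Message('hi', 'alice', '#general', EchoConnector())
print(msg.regex)            # None until a matcher assigns the regex that hit
msg.respond('hello alice')  # prints: [#general] hello alice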
|
c197bf432655ca051ff4fb672cd41e876d539990
|
pipeline/api/api.py
|
pipeline/api/api.py
|
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
|
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
def json_load(data):
try:
return json.loads(data)
except json.decoder.JSONDecodeError:
raise falcon.HTTPBadRequest(None, 'invalid JSON')
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
def on_post(self, req, resp):
data = json_load(req.stream.read().decode('utf-8'))
data, errors = story_schema.load(data)
if errors:
raise falcon.HTTPBadRequest(None, errors)
story = models.Story.create(**data)
result = story_schema.dump(story)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
|
Allow creating and viewing stories
|
Allow creating and viewing stories
Closes #1
|
Python
|
mit
|
thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline,thepoly/Pipeline
|
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
Allow creating and viewing stories
Closes #1
|
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
def json_load(data):
try:
return json.loads(data)
except json.decoder.JSONDecodeError:
raise falcon.HTTPBadRequest(None, 'invalid JSON')
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
def on_post(self, req, resp):
data = json_load(req.stream.read().decode('utf-8'))
data, errors = story_schema.load(data)
if errors:
raise falcon.HTTPBadRequest(None, errors)
story = models.Story.create(**data)
result = story_schema.dump(story)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
|
<commit_before>import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
<commit_msg>Allow creating and viewing stories
Closes #1<commit_after>
|
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
def json_load(data):
try:
return json.loads(data)
except json.decoder.JSONDecodeError:
raise falcon.HTTPBadRequest(None, 'invalid JSON')
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
def on_post(self, req, resp):
data = json_load(req.stream.read().decode('utf-8'))
data, errors = story_schema.load(data)
if errors:
raise falcon.HTTPBadRequest(None, errors)
story = models.Story.create(**data)
result = story_schema.dump(story)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
|
import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
Allow creating and viewing stories
Closes #1import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
def json_load(data):
try:
return json.loads(data)
except json.decoder.JSONDecodeError:
raise falcon.HTTPBadRequest(None, 'invalid JSON')
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
def on_post(self, req, resp):
data = json_load(req.stream.read().decode('utf-8'))
data, errors = story_schema.load(data)
if errors:
raise falcon.HTTPBadRequest(None, errors)
story = models.Story.create(**data)
result = story_schema.dump(story)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
|
<commit_before>import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
<commit_msg>Allow creating and viewing stories
Closes #1<commit_after>import datetime
import json
import falcon
from pipeline.api import models, schemas
def json_serializer(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()
raise TypeError('{} is not JSON serializable'.format(type(obj)))
def json_dump(data):
return json.dumps(data, default=json_serializer)
def json_load(data):
try:
return json.loads(data)
except json.decoder.JSONDecodeError:
raise falcon.HTTPBadRequest(None, 'invalid JSON')
stories_schema = schemas.StorySchema(many=True)
story_schema = schemas.StorySchema()
class StoriesResource:
def on_get(self, req, resp):
stories = models.Story.select()
result = stories_schema.dump(stories)
resp.body = json_dump(result.data)
def on_post(self, req, resp):
data = json_load(req.stream.read().decode('utf-8'))
data, errors = story_schema.load(data)
if errors:
raise falcon.HTTPBadRequest(None, errors)
story = models.Story.create(**data)
result = story_schema.dump(story)
resp.body = json_dump(result.data)
models.connect()
api = falcon.API()
api.add_route('/stories', StoriesResource())
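A hedged client-side sketch of the new POST route. It assumes the app above is being served at http://localhost:8000 (for example via gunicorn) and that 'title' is a guess at a field StorySchema accepts; malformed JSON now comes back as a 400 from json_load instead of a traceback:

import json
import requests

# Create a story, then list all stories.
resp = requests.post('http://localhost:8000/stories',
                     data=json.dumps({'title': 'Hello world'}))
print(resp.status_code, resp.text)  # the created story on success
print(requests.get('http://localhost:8000/stories').json())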
|
220b6a9fee0f307d4de1e48b29093812f7dd10ec
|
var/spack/repos/builtin/packages/m4/package.py
|
var/spack/repos/builtin/packages/m4/package.py
|
from spack import *
class M4(Package):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76')
depends_on('libsigsegv')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
|
from spack import *
class M4(Package):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76')
variant('sigsegv', default=True, description="Build the libsigsegv dependency")
depends_on('libsigsegv', when='+sigsegv')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
|
Make libsigsegv an optional dependency
|
Make libsigsegv an optional dependency
|
Python
|
lgpl-2.1
|
lgarren/spack,mfherbst/spack,LLNL/spack,TheTimmy/spack,EmreAtes/spack,LLNL/spack,tmerrick1/spack,krafczyk/spack,TheTimmy/spack,TheTimmy/spack,lgarren/spack,skosukhin/spack,matthiasdiener/spack,TheTimmy/spack,mfherbst/spack,skosukhin/spack,krafczyk/spack,lgarren/spack,mfherbst/spack,matthiasdiener/spack,matthiasdiener/spack,mfherbst/spack,krafczyk/spack,iulian787/spack,iulian787/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,tmerrick1/spack,lgarren/spack,EmreAtes/spack,EmreAtes/spack,mfherbst/spack,lgarren/spack,LLNL/spack,TheTimmy/spack,iulian787/spack,krafczyk/spack,EmreAtes/spack,iulian787/spack,tmerrick1/spack,skosukhin/spack,krafczyk/spack,iulian787/spack,skosukhin/spack,tmerrick1/spack,tmerrick1/spack,skosukhin/spack,EmreAtes/spack,matthiasdiener/spack
|
from spack import *
class M4(Package):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76')
depends_on('libsigsegv')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
Make libsigsegv an optional dependency
|
from spack import *
class M4(Package):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76')
variant('sigsegv', default=True, description="Build the libsigsegv dependency")
depends_on('libsigsegv', when='+sigsegv')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
|
<commit_before>from spack import *
class M4(Package):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76')
depends_on('libsigsegv')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
<commit_msg>Make libsigsegv an optional dependency<commit_after>
|
from spack import *
class M4(Package):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76')
variant('sigsegv', default=True, description="Build the libsigsegv dependency")
depends_on('libsigsegv', when='+sigsegv')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
|
from spack import *
class M4(Package):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76')
depends_on('libsigsegv')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
Make libsigsegv an optional dependencyfrom spack import *
class M4(Package):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76')
variant('sigsegv', default=True, description="Build the libsigsegv dependency")
depends_on('libsigsegv', when='+sigsegv')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
|
<commit_before>from spack import *
class M4(Package):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76')
depends_on('libsigsegv')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
<commit_msg>Make libsigsegv an optional dependency<commit_after>from spack import *
class M4(Package):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
version('1.4.17', 'a5e9954b1dae036762f7b13673a2cf76')
variant('sigsegv', default=True, description="Build the libsigsegv dependency")
depends_on('libsigsegv', when='+sigsegv')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
|
b38647ef390ed6c78c2d55d706bac2f6a396ad39
|
errors.py
|
errors.py
|
#
## PyMoira client library
##
## This file contains the Moira-related errors.
#
import moira_constants
class MoiraBaseError(Exception):
"""Any exception thrown by the library is inhereted from this"""
pass
class MoiraConnectionError(MoiraBaseError):
"""An error which prevents the client from having or continuing a meaningful
dialogue with a server (parsing failure, connection failure, etc)"""
pass
class MoiraError(MoiraBaseError):
"""An error returned from Moira server itself which has a Moira error code."""
def __init__(self, code):
if code in moira_constants.errors:
MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code])
else:
MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(MoiraBaseError):
"""An error raised in case when Moira MOTD is not empty."""
pass
|
#
## PyMoira client library
##
## This file contains the Moira-related errors.
#
import moira_constants
class MoiraBaseError(Exception):
"""Any exception thrown by the library is inhereted from this"""
pass
class MoiraConnectionError(MoiraBaseError):
"""An error which prevents the client from having or continuing a meaningful
dialogue with a server (parsing failure, connection failure, etc)"""
pass
class MoiraError(MoiraBaseError):
"""An error returned from Moira server itself which has a Moira error code."""
def __init__(self, code):
if code in moira_constants.errors:
MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code])
else:
MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(MoiraBaseError):
"""An error raised in case when Moira MOTD is not empty."""
pass
class MoiraUserError(MoiraBaseError):
"""An error related to Moira but not returned from the server."""
pass
|
Introduce a new error class.
|
Introduce a new error class.
|
Python
|
mit
|
vasilvv/pymoira
|
#
## PyMoira client library
##
## This file contains the Moira-related errors.
#
import moira_constants
class MoiraBaseError(Exception):
"""Any exception thrown by the library is inhereted from this"""
pass
class MoiraConnectionError(MoiraBaseError):
"""An error which prevents the client from having or continuing a meaningful
dialogue with a server (parsing failure, connection failure, etc)"""
pass
class MoiraError(MoiraBaseError):
"""An error returned from Moira server itself which has a Moira error code."""
def __init__(self, code):
if code in moira_constants.errors:
MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code])
else:
MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(MoiraBaseError):
"""An error raised in case when Moira MOTD is not empty."""
pass
Introduce a new error class.
|
#
## PyMoira client library
##
## This file contains the Moira-related errors.
#
import moira_constants
class MoiraBaseError(Exception):
"""Any exception thrown by the library is inhereted from this"""
pass
class MoiraConnectionError(MoiraBaseError):
"""An error which prevents the client from having or continuing a meaningful
dialogue with a server (parsing failure, connection failure, etc)"""
pass
class MoiraError(MoiraBaseError):
"""An error returned from Moira server itself which has a Moira error code."""
def __init__(self, code):
if code in moira_constants.errors:
MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code])
else:
MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(MoiraBaseError):
"""An error raised in case when Moira MOTD is not empty."""
pass
class MoiraUserError(MoiraBaseError):
"""An error related to Moira but not returned from the server."""
pass
|
<commit_before>#
## PyMoira client library
##
## This file contains the Moira-related errors.
#
import moira_constants
class MoiraBaseError(Exception):
"""Any exception thrown by the library is inhereted from this"""
pass
class MoiraConnectionError(MoiraBaseError):
"""An error which prevents the client from having or continuing a meaningful
dialogue with a server (parsing failure, connection failure, etc)"""
pass
class MoiraError(MoiraBaseError):
"""An error returned from Moira server itself which has a Moira error code."""
def __init__(self, code):
if code in moira_constants.errors:
MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code])
else:
MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(MoiraBaseError):
"""An error raised in case when Moira MOTD is not empty."""
pass
<commit_msg>Introduce a new error class.<commit_after>
|
#
## PyMoira client library
##
## This file contains the Moira-related errors.
#
import moira_constants
class MoiraBaseError(Exception):
"""Any exception thrown by the library is inhereted from this"""
pass
class MoiraConnectionError(MoiraBaseError):
"""An error which prevents the client from having or continuing a meaningful
dialogue with a server (parsing failure, connection failure, etc)"""
pass
class MoiraError(MoiraBaseError):
"""An error returned from Moira server itself which has a Moira error code."""
def __init__(self, code):
if code in moira_constants.errors:
MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code])
else:
MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(MoiraBaseError):
"""An error raised in case when Moira MOTD is not empty."""
pass
class MoiraUserError(MoiraBaseError):
"""An error related to Moira but not returned from the server."""
pass
|
#
## PyMoira client library
##
## This file contains the Moira-related errors.
#
import moira_constants
class MoiraBaseError(Exception):
"""Any exception thrown by the library is inhereted from this"""
pass
class MoiraConnectionError(MoiraBaseError):
"""An error which prevents the client from having or continuing a meaningful
dialogue with a server (parsing failure, connection failure, etc)"""
pass
class MoiraError(MoiraBaseError):
"""An error returned from Moira server itself which has a Moira error code."""
def __init__(self, code):
if code in moira_constants.errors:
MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code])
else:
MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(MoiraBaseError):
"""An error raised in case when Moira MOTD is not empty."""
pass
Introduce a new error class.#
## PyMoira client library
##
## This file contains the Moira-related errors.
#
import moira_constants
class MoiraBaseError(Exception):
"""Any exception thrown by the library is inhereted from this"""
pass
class MoiraConnectionError(MoiraBaseError):
"""An error which prevents the client from having or continuing a meaningful
dialogue with a server (parsing failure, connection failure, etc)"""
pass
class MoiraError(MoiraBaseError):
"""An error returned from Moira server itself which has a Moira error code."""
def __init__(self, code):
if code in moira_constants.errors:
MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code])
else:
MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(MoiraBaseError):
"""An error raised in case when Moira MOTD is not empty."""
pass
class MoiraUserError(MoiraBaseError):
"""An error related to Moira but not returned from the server."""
pass
|
<commit_before>#
## PyMoira client library
##
## This file contains the Moira-related errors.
#
import moira_constants
class MoiraBaseError(Exception):
"""Any exception thrown by the library is inhereted from this"""
pass
class MoiraConnectionError(MoiraBaseError):
"""An error which prevents the client from having or continuing a meaningful
dialogue with a server (parsing failure, connection failure, etc)"""
pass
class MoiraError(MoiraBaseError):
"""An error returned from Moira server itself which has a Moira error code."""
def __init__(self, code):
if code in moira_constants.errors:
MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code])
else:
MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(MoiraBaseError):
"""An error raised in case when Moira MOTD is not empty."""
pass
<commit_msg>Introduce a new error class.<commit_after>#
## PyMoira client library
##
## This file contains the Moira-related errors.
#
import moira_constants
class MoiraBaseError(Exception):
"""Any exception thrown by the library is inhereted from this"""
pass
class MoiraConnectionError(MoiraBaseError):
"""An error which prevents the client from having or continuing a meaningful
dialogue with a server (parsing failure, connection failure, etc)"""
pass
class MoiraError(MoiraBaseError):
"""An error returned from Moira server itself which has a Moira error code."""
def __init__(self, code):
if code in moira_constants.errors:
MoiraBaseError.__init__(self, "Moira error: %s" % moira_constants.errors[code])
else:
MoiraBaseError.__init__(self, "Unknown Moira error (code %i)" % code)
class MoiraUnavailableError(MoiraBaseError):
"""An error raised in case when Moira MOTD is not empty."""
pass
class MoiraUserError(MoiraBaseError):
"""An error related to Moira but not returned from the server."""
pass
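A small demonstration of how callers can branch on the extended hierarchy; the class bodies are trimmed re-declarations of the ones above so the snippet runs stand-alone, and the error text is invented:

class MoiraBaseError(Exception): pass
class MoiraError(MoiraBaseError): pass
class MoiraUserError(MoiraBaseError): pass

try:
    raise MoiraUserError('list names may not contain spaces')
except MoiraError:
    print('reported by the server')       # does not fire: not a server error
except MoiraUserError as e:
    print('client-side problem: %s' % e)  # this branch fires

Because MoiraUserError subclasses MoiraBaseError rather than MoiraError, existing handlers that catch only server-reported errors are unaffected.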
|
70b41bfdb2e1fa6daa78f5a484a2825a25e3811e
|
apps/__init__.py
|
apps/__init__.py
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib,logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
Add logging to error output
|
Add logging to error output
|
Python
|
agpl-3.0
|
sociam/indx,sociam/indx,sociam/indx
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
Add logging to error output
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib,logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
<commit_before>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
<commit_msg>Add logging to error output<commit_after>
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib,logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
Add logging to error output## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib,logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
<commit_before>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
<commit_msg>Add logging to error output<commit_after>## module loader, goes to see which submodules have 'html' directories
## and declares them at the toplevel
import os,importlib,logging
def find_module_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [o for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'__init__.py']))]
return subdirs
def find_html_dirs():
curdir = os.path.dirname(os.path.abspath(__file__))
subdirs = [(o,os.path.sep.join([curdir,o,'html'])) for o in os.listdir(curdir) if os.path.exists(os.path.sep.join([curdir,o,'html']))]
return dict(subdirs)
def import_app(app):
try:
importlib.import_module(app)
except Exception as e:
logging.error("Couldn't load app: {0}, error: {1}".format(app, e))
MODULES = {}
_html_dirs = find_html_dirs()
[ MODULES.update({m_name:{'module': import_app('.'.join(['apps',m_name])), 'html':_html_dirs.get(m_name)}}) for m_name in find_module_dirs() ]
|
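Editor's aside (not part of the dataset): the record above fixes a missing `logging` import in a homegrown submodule scanner. As a hedged illustration, the same discovery loop can be written with the standard library's pkgutil; the `apps` package and its per-module `html/` directories below are assumptions carried over from the record, not real names.
## Minimal sketch, assuming an importable `apps` package laid out as in the record.
import importlib
import logging
import os
import pkgutil
import apps  # hypothetical package mirroring the record above
MODULES = {}
for info in pkgutil.iter_modules(apps.__path__):
    if not info.ispkg:  # only subpackages, like the __init__.py check above
        continue
    html_dir = os.path.join(os.path.dirname(apps.__file__), info.name, 'html')
    try:
        module = importlib.import_module('apps.' + info.name)
    except Exception as e:
        logging.error("Couldn't load app: {0}, error: {1}".format(info.name, e))
        module = None
    MODULES[info.name] = {'module': module,
                          'html': html_dir if os.path.isdir(html_dir) else None}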
95e347ae4086d05aadf91a393b856961b34026a5
|
website_field_autocomplete/controllers/main.py
|
website_field_autocomplete/controllers/main.py
|
# -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import json
from openerp import http
from openerp.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(
'/website/field_autocomplete/<string:model>',
type='http',
auth='public',
methods=['GET'],
website=True,
)
def _get_field_autocomplete(self, model, **kwargs):
""" Return json autocomplete data """
domain = json.loads(kwargs.get('domain', "[]"))
fields = json.loads(kwargs.get('fields', "[]"))
limit = kwargs.get('limit', None)
res = self._get_autocomplete_data(model, domain, fields, limit)
return json.dumps(res.values())
def _get_autocomplete_data(self, model, domain, fields, limit=None):
""" Gets and returns raw record data
Params:
model: Model name to query on
domain: Search domain
fields: List of fields to get
limit: Limit results to
Returns:
Dict of record dicts, keyed by ID
"""
res = {}
if limit:
limit = int(limit)
self.record_ids = request.env[model].search(domain, limit=limit)
for rec_id in self.record_ids:
res[rec_id.id] = {
k: getattr(rec_id, k, None) for k in fields
}
return res
|
# -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import json
from openerp import http
from openerp.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(
'/website/field_autocomplete/<string:model>',
type='http',
auth='public',
methods=['GET'],
website=True,
)
def _get_field_autocomplete(self, model, **kwargs):
""" Return json autocomplete data """
domain = json.loads(kwargs.get('domain', "[]"))
fields = json.loads(kwargs.get('fields', "[]"))
limit = kwargs.get('limit', None)
res = self._get_autocomplete_data(model, domain, fields, limit)
return json.dumps(res.values())
def _get_autocomplete_data(self, model, domain, fields, limit=None):
""" Gets and returns raw record data
Params:
model: Model name to query on
domain: Search domain
fields: List of fields to get
limit: Limit results to
Returns:
Dict of record dicts, keyed by ID
"""
if limit:
limit = int(limit)
res = request.env[model].search_read(
domain, fields, limit=limit
)
return {r['id']: r for r in res}
|
Use search_read * Use search_read in controller data getter, instead of custom implementation
|
[FIX] website_field_autocomplete: Use search_read
* Use search_read in controller data getter, instead of custom implementation
|
Python
|
agpl-3.0
|
Tecnativa/website,nicolas-petit/website,khaeusler/website,JayVora-SerpentCS/website,RoelAdriaans-B-informed/website,JayVora-SerpentCS/website,khaeusler/website,nicolas-petit/website,Tecnativa/website,khaeusler/website,Tecnativa/website,RoelAdriaans-B-informed/website,nicolas-petit/website,JayVora-SerpentCS/website,RoelAdriaans-B-informed/website,RoelAdriaans-B-informed/website
|
# -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import json
from openerp import http
from openerp.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(
'/website/field_autocomplete/<string:model>',
type='http',
auth='public',
methods=['GET'],
website=True,
)
def _get_field_autocomplete(self, model, **kwargs):
""" Return json autocomplete data """
domain = json.loads(kwargs.get('domain', "[]"))
fields = json.loads(kwargs.get('fields', "[]"))
limit = kwargs.get('limit', None)
res = self._get_autocomplete_data(model, domain, fields, limit)
return json.dumps(res.values())
def _get_autocomplete_data(self, model, domain, fields, limit=None):
""" Gets and returns raw record data
Params:
model: Model name to query on
domain: Search domain
fields: List of fields to get
limit: Limit results to
Returns:
Dict of record dicts, keyed by ID
"""
res = {}
if limit:
limit = int(limit)
self.record_ids = request.env[model].search(domain, limit=limit)
for rec_id in self.record_ids:
res[rec_id.id] = {
k: getattr(rec_id, k, None) for k in fields
}
return res
[FIX] website_field_autocomplete: Use search_read
* Use search_read in controller data getter, instead of custom implementation
|
# -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import json
from openerp import http
from openerp.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(
'/website/field_autocomplete/<string:model>',
type='http',
auth='public',
methods=['GET'],
website=True,
)
def _get_field_autocomplete(self, model, **kwargs):
""" Return json autocomplete data """
domain = json.loads(kwargs.get('domain', "[]"))
fields = json.loads(kwargs.get('fields', "[]"))
limit = kwargs.get('limit', None)
res = self._get_autocomplete_data(model, domain, fields, limit)
return json.dumps(res.values())
def _get_autocomplete_data(self, model, domain, fields, limit=None):
""" Gets and returns raw record data
Params:
model: Model name to query on
domain: Search domain
fields: List of fields to get
limit: Limit results to
Returns:
Dict of record dicts, keyed by ID
"""
if limit:
limit = int(limit)
res = request.env[model].search_read(
domain, fields, limit=limit
)
return {r['id']: r for r in res}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import json
from openerp import http
from openerp.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(
'/website/field_autocomplete/<string:model>',
type='http',
auth='public',
methods=['GET'],
website=True,
)
def _get_field_autocomplete(self, model, **kwargs):
""" Return json autocomplete data """
domain = json.loads(kwargs.get('domain', "[]"))
fields = json.loads(kwargs.get('fields', "[]"))
limit = kwargs.get('limit', None)
res = self._get_autocomplete_data(model, domain, fields, limit)
return json.dumps(res.values())
def _get_autocomplete_data(self, model, domain, fields, limit=None):
""" Gets and returns raw record data
Params:
model: Model name to query on
domain: Search domain
fields: List of fields to get
limit: Limit results to
Returns:
Dict of record dicts, keyed by ID
"""
res = {}
if limit:
limit = int(limit)
self.record_ids = request.env[model].search(domain, limit=limit)
for rec_id in self.record_ids:
res[rec_id.id] = {
k: getattr(rec_id, k, None) for k in fields
}
return res
<commit_msg>[FIX] website_field_autocomplete: Use search_read
* Use search_read in controller data getter, instead of custom implementation<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import json
from openerp import http
from openerp.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(
'/website/field_autocomplete/<string:model>',
type='http',
auth='public',
methods=['GET'],
website=True,
)
def _get_field_autocomplete(self, model, **kwargs):
""" Return json autocomplete data """
domain = json.loads(kwargs.get('domain', "[]"))
fields = json.loads(kwargs.get('fields', "[]"))
limit = kwargs.get('limit', None)
res = self._get_autocomplete_data(model, domain, fields, limit)
return json.dumps(res.values())
def _get_autocomplete_data(self, model, domain, fields, limit=None):
""" Gets and returns raw record data
Params:
model: Model name to query on
domain: Search domain
fields: List of fields to get
limit: Limit results to
Returns:
Dict of record dicts, keyed by ID
"""
if limit:
limit = int(limit)
res = request.env[model].search_read(
domain, fields, limit=limit
)
return {r['id']: r for r in res}
|
# -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import json
from openerp import http
from openerp.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(
'/website/field_autocomplete/<string:model>',
type='http',
auth='public',
methods=['GET'],
website=True,
)
def _get_field_autocomplete(self, model, **kwargs):
""" Return json autocomplete data """
domain = json.loads(kwargs.get('domain', "[]"))
fields = json.loads(kwargs.get('fields', "[]"))
limit = kwargs.get('limit', None)
res = self._get_autocomplete_data(model, domain, fields, limit)
return json.dumps(res.values())
def _get_autocomplete_data(self, model, domain, fields, limit=None):
""" Gets and returns raw record data
Params:
model: Model name to query on
domain: Search domain
fields: List of fields to get
limit: Limit results to
Returns:
Dict of record dicts, keyed by ID
"""
res = {}
if limit:
limit = int(limit)
self.record_ids = request.env[model].search(domain, limit=limit)
for rec_id in self.record_ids:
res[rec_id.id] = {
k: getattr(rec_id, k, None) for k in fields
}
return res
[FIX] website_field_autocomplete: Use search_read
* Use search_read in controller data getter, instead of custom implementation# -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import json
from openerp import http
from openerp.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(
'/website/field_autocomplete/<string:model>',
type='http',
auth='public',
methods=['GET'],
website=True,
)
def _get_field_autocomplete(self, model, **kwargs):
""" Return json autocomplete data """
domain = json.loads(kwargs.get('domain', "[]"))
fields = json.loads(kwargs.get('fields', "[]"))
limit = kwargs.get('limit', None)
res = self._get_autocomplete_data(model, domain, fields, limit)
return json.dumps(res.values())
def _get_autocomplete_data(self, model, domain, fields, limit=None):
""" Gets and returns raw record data
Params:
model: Model name to query on
domain: Search domain
fields: List of fields to get
limit: Limit results to
Returns:
Dict of record dicts, keyed by ID
"""
if limit:
limit = int(limit)
res = request.env[model].search_read(
domain, fields, limit=limit
)
return {r['id']: r for r in res}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import json
from openerp import http
from openerp.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(
'/website/field_autocomplete/<string:model>',
type='http',
auth='public',
methods=['GET'],
website=True,
)
def _get_field_autocomplete(self, model, **kwargs):
""" Return json autocomplete data """
domain = json.loads(kwargs.get('domain', "[]"))
fields = json.loads(kwargs.get('fields', "[]"))
limit = kwargs.get('limit', None)
res = self._get_autocomplete_data(model, domain, fields, limit)
return json.dumps(res.values())
def _get_autocomplete_data(self, model, domain, fields, limit=None):
""" Gets and returns raw record data
Params:
model: Model name to query on
domain: Search domain
fields: List of fields to get
limit: Limit results to
Returns:
Dict of record dicts, keyed by ID
"""
res = {}
if limit:
limit = int(limit)
self.record_ids = request.env[model].search(domain, limit=limit)
for rec_id in self.record_ids:
res[rec_id.id] = {
k: getattr(rec_id, k, None) for k in fields
}
return res
<commit_msg>[FIX] website_field_autocomplete: Use search_read
* Use search_read in controller data getter, instead of custom implementation<commit_after># -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
import json
from openerp import http
from openerp.http import request
from openerp.addons.website.controllers.main import Website
class Website(Website):
@http.route(
'/website/field_autocomplete/<string:model>',
type='http',
auth='public',
methods=['GET'],
website=True,
)
def _get_field_autocomplete(self, model, **kwargs):
""" Return json autocomplete data """
domain = json.loads(kwargs.get('domain', "[]"))
fields = json.loads(kwargs.get('fields', "[]"))
limit = kwargs.get('limit', None)
res = self._get_autocomplete_data(model, domain, fields, limit)
return json.dumps(res.values())
def _get_autocomplete_data(self, model, domain, fields, limit=None):
""" Gets and returns raw record data
Params:
model: Model name to query on
domain: Search domain
fields: List of fields to get
limit: Limit results to
Returns:
Dict of record dicts, keyed by ID
"""
if limit:
limit = int(limit)
res = request.env[model].search_read(
domain, fields, limit=limit
)
return {r['id']: r for r in res}
|
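Editor's aside (not part of the dataset): the fix above collapses a manual search() plus getattr() loop into Odoo's search_read(), which performs the search and the field read in a single ORM call. A minimal sketch of that pattern, assuming an Odoo environment `env`; the model, domain, and field values are illustrative only.
def autocomplete_data(env, model, domain, fields, limit=None):
    # search_read() = search() followed by read(), in one ORM call
    rows = env[model].search_read(domain, fields, limit=limit)
    # key rows by database id, matching the patched controller's return shape
    return {row['id']: row for row in rows}
# Hypothetical call shape:
#   autocomplete_data(request.env, 'res.partner',
#                     [('name', 'ilike', 'las')], ['name'], limit=10)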
3ec71d3925a3551f6f25fc25e827c88caaff1fdd
|
tests/integration/test_redirection_external.py
|
tests/integration/test_redirection_external.py
|
"""Check external REDIRECTIONS"""
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
"""Check external REDIRECTIONS"""
import os
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
def test_external_redirection(build, output_dir):
ext_link = os.path.join(output_dir, 'external.html')
assert os.path.exists(ext_link)
with open(ext_link) as ext_link_fd:
ext_link_content = ext_link_fd.read()
redirect_tag = '<meta http-equiv="refresh" content="0; url=http://www.example.com/">'
assert redirect_tag in ext_link_content
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
Add test for external redirection.
|
Add test for external redirection.
|
Python
|
mit
|
okin/nikola,okin/nikola,okin/nikola,getnikola/nikola,getnikola/nikola,getnikola/nikola,okin/nikola,getnikola/nikola
|
"""Check external REDIRECTIONS"""
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
Add test for external redirection.
|
"""Check external REDIRECTIONS"""
import os
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
def test_external_redirection(build, output_dir):
ext_link = os.path.join(output_dir, 'external.html')
assert os.path.exists(ext_link)
with open(ext_link) as ext_link_fd:
ext_link_content = ext_link_fd.read()
redirect_tag = '<meta http-equiv="refresh" content="0; url=http://www.example.com/">'
assert redirect_tag in ext_link_content
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
<commit_before>"""Check external REDIRECTIONS"""
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
<commit_msg>Add test for external redirection.<commit_after>
|
"""Check external REDIRECTIONS"""
import os
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
def test_external_redirection(build, output_dir):
ext_link = os.path.join(output_dir, 'external.html')
assert os.path.exists(ext_link)
with open(ext_link) as ext_link_fd:
ext_link_content = ext_link_fd.read()
redirect_tag = '<meta http-equiv="refresh" content="0; url=http://www.example.com/">'
assert redirect_tag in ext_link_content
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
"""Check external REDIRECTIONS"""
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
Add test for external redirection."""Check external REDIRECTIONS"""
import os
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
def test_external_redirection(build, output_dir):
ext_link = os.path.join(output_dir, 'external.html')
assert os.path.exists(ext_link)
with open(ext_link) as ext_link_fd:
ext_link_content = ext_link_fd.read()
redirect_tag = '<meta http-equiv="refresh" content="0; url=http://www.example.com/">'
assert redirect_tag in ext_link_content
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
<commit_before>"""Check external REDIRECTIONS"""
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
<commit_msg>Add test for external redirection.<commit_after>"""Check external REDIRECTIONS"""
import os
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
def test_external_redirection(build, output_dir):
ext_link = os.path.join(output_dir, 'external.html')
assert os.path.exists(ext_link)
with open(ext_link) as ext_link_fd:
ext_link_content = ext_link_fd.read()
redirect_tag = '<meta http-equiv="refresh" content="0; url=http://www.example.com/">'
assert redirect_tag in ext_link_content
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
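Editor's aside (not part of the dataset): the new test asserts that an external redirection is rendered as a zero-delay meta-refresh page. A minimal sketch of such a stub page follows; write_redirect is a hypothetical helper, and the tag format is copied from the assertion above.
def write_redirect(path, url):
    # zero-delay meta refresh, the same tag the test greps for
    html = ('<!DOCTYPE html><html><head>'
            '<meta http-equiv="refresh" content="0; url={0}">'
            '</head><body></body></html>').format(url)
    with open(path, 'w') as fh:
        fh.write(html)
# write_redirect('output/external.html', 'http://www.example.com/')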
ad4effbdf95b51f151d613f02f70b4501bbe453d
|
tests/unit/extensions/flask_babel_unit_test.py
|
tests/unit/extensions/flask_babel_unit_test.py
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
|
Set default locale in test to avoid test failures when a different default is used than expected.
|
Set default locale in test to avoid test failures when a different default is used than expected.
|
Python
|
mit
|
BMeu/Orchard,BMeu/Orchard
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
Set default locale in test to avoid test failures when a different default is used than expected.
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
|
<commit_before># -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
<commit_msg>Set default locale in test to avoid test failures when a different default is used than expected.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
Set default locale in test to avoid test failures when a different default is used than expected.# -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
|
<commit_before># -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
<commit_msg>Set default locale in test to avoid test failures when a different default is used than expected.<commit_after># -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
|
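Editor's aside (not part of the dataset): the test pins BABEL_DEFAULT_LOCALE so the fallback branch is deterministic. A hedged sketch of the locale negotiation such a _get_locale typically performs; this is a generic shape built on werkzeug's Accept-Language parsing, not orchard's actual implementation.
from flask import current_app, request
def get_locale():
    # languages the app ships translations for, per the test's LANGUAGES dict
    supported = list(current_app.config['LANGUAGES'])
    default = current_app.config.get('BABEL_DEFAULT_LOCALE', 'en')
    # best_match walks the Accept-Language header in quality (q=) order
    return request.accept_languages.best_match(supported, default=default)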
09851ff2903db29703616da0fbc9ec003955712a
|
zerver/lib/markdown/preprocessor_priorities.py
|
zerver/lib/markdown/preprocessor_priorities.py
|
# Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
"help_relative_links": 475,
"setting": 450,
"fenced_code_block": 25,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
# Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
# Priorities for the built-in preprocessors are commented out.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
# "include_wrapper": 500,
"help_relative_links": 475,
"setting": 450,
# "normalize_whitespace": 30,
"fenced_code_block": 25,
# "html_block": 20,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
Document built-in preprocessor priorities for convenience.
|
markdown: Document built-in preprocessor priorities for convenience.
Fixes #19810
|
Python
|
apache-2.0
|
eeshangarg/zulip,rht/zulip,rht/zulip,kou/zulip,eeshangarg/zulip,rht/zulip,eeshangarg/zulip,zulip/zulip,rht/zulip,andersk/zulip,kou/zulip,eeshangarg/zulip,kou/zulip,andersk/zulip,andersk/zulip,andersk/zulip,kou/zulip,andersk/zulip,rht/zulip,rht/zulip,zulip/zulip,kou/zulip,zulip/zulip,zulip/zulip,eeshangarg/zulip,andersk/zulip,kou/zulip,kou/zulip,andersk/zulip,eeshangarg/zulip,rht/zulip,zulip/zulip,zulip/zulip,eeshangarg/zulip,zulip/zulip
|
# Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
"help_relative_links": 475,
"setting": 450,
"fenced_code_block": 25,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
markdown: Document built-in preprocessor priorities for convenience.
Fixes #19810
|
# Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
# Priorities for the built-in preprocessors are commented out.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
# "include_wrapper": 500,
"help_relative_links": 475,
"setting": 450,
# "normalize_whitespace": 30,
"fenced_code_block": 25,
# "html_block": 20,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
<commit_before># Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
"help_relative_links": 475,
"setting": 450,
"fenced_code_block": 25,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
<commit_msg>markdown: Document built-in preprocessor priorities for convenience.
Fixes #19810<commit_after>
|
# Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
# Priorities for the built-in preprocessors are commented out.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
# "include_wrapper": 500,
"help_relative_links": 475,
"setting": 450,
# "normalize_whitespace": 30,
"fenced_code_block": 25,
# "html_block": 20,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
# Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
"help_relative_links": 475,
"setting": 450,
"fenced_code_block": 25,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
markdown: Document built-in preprocessor priorities for convenience.
Fixes #19810# Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
# Priorities for the built-in preprocessors are commented out.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
# "include_wrapper": 500,
"help_relative_links": 475,
"setting": 450,
# "normalize_whitespace": 30,
"fenced_code_block": 25,
# "html_block": 20,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
<commit_before># Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
"help_relative_links": 475,
"setting": 450,
"fenced_code_block": 25,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
<commit_msg>markdown: Document built-in preprocessor priorities for convenience.
Fixes #19810<commit_after># Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
# Priorities for the built-in preprocessors are commented out.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
# "include_wrapper": 500,
"help_relative_links": 475,
"setting": 450,
# "normalize_whitespace": 30,
"fenced_code_block": 25,
# "html_block": 20,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
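Editor's aside (not part of the dataset): the priorities in that dict feed Python-Markdown's registry, where a higher number runs earlier. A minimal sketch of registering one preprocessor at priority 450, assuming Python-Markdown 3.x; the preprocessor body is a placeholder.
from markdown.extensions import Extension
from markdown.preprocessors import Preprocessor
class SettingPreprocessor(Preprocessor):
    def run(self, lines):
        return lines  # placeholder; a real preprocessor rewrites `lines`
class SettingExtension(Extension):
    def extendMarkdown(self, md):
        # 450 lands between "include" (500) and the built-in
        # "normalize_whitespace" (30) noted in the record above
        md.preprocessors.register(SettingPreprocessor(md), 'setting', 450)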
6835fa9e8978a081186008785bd2e11522372aa9
|
tests/utils.py
|
tests/utils.py
|
import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
return pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout)))
|
import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
pat4 = re.compile(b'\d\d\d\d-\d\d-\d\d')
return pat4.sub(b'0000-00-00', pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout))))
|
Fix dates in test output
|
Fix dates in test output
|
Python
|
mit
|
raphaelm/python-sepadd,lutoma/python-sepadd
|
import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
return pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout)))
Fix dates in test output
|
import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
pat4 = re.compile(b'\d\d\d\d-\d\d-\d\d')
return pat4.sub(b'0000-00-00', pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout))))
|
<commit_before>import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
return pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout)))
<commit_msg>Fix dates in test output<commit_after>
|
import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
pat4 = re.compile(b'\d\d\d\d-\d\d-\d\d')
return pat4.sub(b'0000-00-00', pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout))))
|
import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
return pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout)))
Fix dates in test outputimport os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
pat4 = re.compile(b'\d\d\d\d-\d\d-\d\d')
return pat4.sub(b'0000-00-00', pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout))))
|
<commit_before>import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
return pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout)))
<commit_msg>Fix dates in test output<commit_after>import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
pat4 = re.compile(b'\d\d\d\d-\d\d-\d\d')
return pat4.sub(b'0000-00-00', pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout))))
|
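Editor's aside (not part of the dataset): the patterns above use plain byte strings, where `\d` is an invalid escape sequence on modern Python. A hedged rewrite of the same scrubber with raw byte literals; the order still matters, since the datetime pattern must run before the bare date pattern.
import re
_SCRUBBERS = [
    (re.compile(rb'-[0-9a-f]{12}'), b'-000000000000'),
    (re.compile(rb'<MsgId>[^<]*</MsgId>'), b'<MsgId></MsgId>'),
    (re.compile(rb'\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}'), b'0000-00-00T00:00:00'),
    (re.compile(rb'\d{4}-\d{2}-\d{2}'), b'0000-00-00'),
]
def clean_ids(xmlout):
    for pattern, replacement in _SCRUBBERS:
        xmlout = pattern.sub(replacement, xmlout)
    return xmlout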
36bb5605b4ec7a062190e8f5ef755023c0b2f6e4
|
rebulk/debug.py
|
rebulk/debug.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Debug tools.
Can be configured by changing the values of these variables.
DEBUG = False
Enable this variable to activate debug features (like defined_at parameters). It can slow down Rebulk
LOG_LEVEL = 0
Default log level of generated rebulk logs.
"""
import inspect
import os
from collections import namedtuple
DEBUG = False
LOG_LEVEL = 0
class Frame(namedtuple('Frame', ['lineno', 'package', 'name', 'filename'])):
"""
Stack frame representation.
"""
__slots__ = ()
def __repr__(self):
return "%s#L%s" % (os.path.basename(self.filename), self.lineno)
def defined_at():
"""
Get definition location of a pattern or a match (outside of rebulk package).
:return:
:rtype:
"""
if DEBUG:
frame = inspect.currentframe()
while frame:
try:
if frame.f_globals['__package__'] != __package__:
break
except KeyError: # pragma:no cover
# If package is missing, consider we are in. Workaround for python 3.3.
break
frame = frame.f_back
ret = Frame(frame.f_lineno,
frame.f_globals.get('__package__'),
frame.f_globals.get('__name__'),
frame.f_code.co_filename)
del frame
return ret
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Debug tools.
Can be configured by changing the values of these variables.
DEBUG = False
Enable this variable to activate debug features (like defined_at parameters). It can slow down Rebulk
LOG_LEVEL = 0
Default log level of generated rebulk logs.
"""
import inspect
import logging
import os
from collections import namedtuple
DEBUG = False
LOG_LEVEL = logging.DEBUG
class Frame(namedtuple('Frame', ['lineno', 'package', 'name', 'filename'])):
"""
Stack frame representation.
"""
__slots__ = ()
def __repr__(self):
return "%s#L%s" % (os.path.basename(self.filename), self.lineno)
def defined_at():
"""
Get definition location of a pattern or a match (outside of rebulk package).
:return:
:rtype:
"""
if DEBUG:
frame = inspect.currentframe()
while frame:
try:
if frame.f_globals['__package__'] != __package__:
break
except KeyError: # pragma:no cover
# If package is missing, consider we are in. Workaround for python 3.3.
break
frame = frame.f_back
ret = Frame(frame.f_lineno,
frame.f_globals.get('__package__'),
frame.f_globals.get('__name__'),
frame.f_code.co_filename)
del frame
return ret
|
Set default LOG_LEVEL to logging.DEBUG
|
Set default LOG_LEVEL to logging.DEBUG
|
Python
|
mit
|
Toilal/rebulk
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Debug tools.
Can be configured by changing the values of these variables.
DEBUG = False
Enable this variable to activate debug features (like defined_at parameters). It can slow down Rebulk
LOG_LEVEL = 0
Default log level of generated rebulk logs.
"""
import inspect
import os
from collections import namedtuple
DEBUG = False
LOG_LEVEL = 0
class Frame(namedtuple('Frame', ['lineno', 'package', 'name', 'filename'])):
"""
Stack frame representation.
"""
__slots__ = ()
def __repr__(self):
return "%s#L%s" % (os.path.basename(self.filename), self.lineno)
def defined_at():
"""
Get definition location of a pattern or a match (outside of rebulk package).
:return:
:rtype:
"""
if DEBUG:
frame = inspect.currentframe()
while frame:
try:
if frame.f_globals['__package__'] != __package__:
break
except KeyError: # pragma:no cover
# If package is missing, consider we are in. Workaround for python 3.3.
break
frame = frame.f_back
ret = Frame(frame.f_lineno,
frame.f_globals.get('__package__'),
frame.f_globals.get('__name__'),
frame.f_code.co_filename)
del frame
return ret
Set default LOG_LEVEL to logging.DEBUG
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Debug tools.
Can be configured by changing the values of these variables.
DEBUG = False
Enable this variable to activate debug features (like defined_at parameters). It can slow down Rebulk
LOG_LEVEL = 0
Default log level of generated rebulk logs.
"""
import inspect
import logging
import os
from collections import namedtuple
DEBUG = False
LOG_LEVEL = logging.DEBUG
class Frame(namedtuple('Frame', ['lineno', 'package', 'name', 'filename'])):
"""
Stack frame representation.
"""
__slots__ = ()
def __repr__(self):
return "%s#L%s" % (os.path.basename(self.filename), self.lineno)
def defined_at():
"""
Get definition location of a pattern or a match (outside of rebulk package).
:return:
:rtype:
"""
if DEBUG:
frame = inspect.currentframe()
while frame:
try:
if frame.f_globals['__package__'] != __package__:
break
except KeyError: # pragma:no cover
# If package is missing, consider we are in. Workaround for python 3.3.
break
frame = frame.f_back
ret = Frame(frame.f_lineno,
frame.f_globals.get('__package__'),
frame.f_globals.get('__name__'),
frame.f_code.co_filename)
del frame
return ret
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Debug tools.
Can be configured by changing the values of these variables.
DEBUG = False
Enable this variable to activate debug features (like defined_at parameters). It can slow down Rebulk
LOG_LEVEL = 0
Default log level of generated rebulk logs.
"""
import inspect
import os
from collections import namedtuple
DEBUG = False
LOG_LEVEL = 0
class Frame(namedtuple('Frame', ['lineno', 'package', 'name', 'filename'])):
"""
Stack frame representation.
"""
__slots__ = ()
def __repr__(self):
return "%s#L%s" % (os.path.basename(self.filename), self.lineno)
def defined_at():
"""
Get definition location of a pattern or a match (outside of rebulk package).
:return:
:rtype:
"""
if DEBUG:
frame = inspect.currentframe()
while frame:
try:
if frame.f_globals['__package__'] != __package__:
break
except KeyError: # pragma:no cover
# If package is missing, consider we are in. Workaround for python 3.3.
break
frame = frame.f_back
ret = Frame(frame.f_lineno,
frame.f_globals.get('__package__'),
frame.f_globals.get('__name__'),
frame.f_code.co_filename)
del frame
return ret
<commit_msg>Set default LOG_LEVEL to logging.DEBUG<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Debug tools.
Can be configured by changing the values of these variables.
DEBUG = False
Enable this variable to activate debug features (like defined_at parameters). It can slow down Rebulk
LOG_LEVEL = 0
Default log level of generated rebulk logs.
"""
import inspect
import logging
import os
from collections import namedtuple
DEBUG = False
LOG_LEVEL = logging.DEBUG
class Frame(namedtuple('Frame', ['lineno', 'package', 'name', 'filename'])):
"""
Stack frame representation.
"""
__slots__ = ()
def __repr__(self):
return "%s#L%s" % (os.path.basename(self.filename), self.lineno)
def defined_at():
"""
Get definition location of a pattern or a match (outside of rebulk package).
:return:
:rtype:
"""
if DEBUG:
frame = inspect.currentframe()
while frame:
try:
if frame.f_globals['__package__'] != __package__:
break
except KeyError: # pragma:no cover
# If package is missing, consider we are in. Workaround for python 3.3.
break
frame = frame.f_back
ret = Frame(frame.f_lineno,
frame.f_globals.get('__package__'),
frame.f_globals.get('__name__'),
frame.f_code.co_filename)
del frame
return ret
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Debug tools.
Can be configured by changing the values of these variables.
DEBUG = False
Enable this variable to activate debug features (like defined_at parameters). It can slow down Rebulk
LOG_LEVEL = 0
Default log level of generated rebulk logs.
"""
import inspect
import os
from collections import namedtuple
DEBUG = False
LOG_LEVEL = 0
class Frame(namedtuple('Frame', ['lineno', 'package', 'name', 'filename'])):
"""
Stack frame representation.
"""
__slots__ = ()
def __repr__(self):
return "%s#L%s" % (os.path.basename(self.filename), self.lineno)
def defined_at():
"""
Get definition location of a pattern or a match (outside of rebulk package).
:return:
:rtype:
"""
if DEBUG:
frame = inspect.currentframe()
while frame:
try:
if frame.f_globals['__package__'] != __package__:
break
except KeyError: # pragma:no cover
# If package is missing, consider we are in. Workaround for python 3.3.
break
frame = frame.f_back
ret = Frame(frame.f_lineno,
frame.f_globals.get('__package__'),
frame.f_globals.get('__name__'),
frame.f_code.co_filename)
del frame
return ret
Set default LOG_LEVEL to logging.DEBUG#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Debug tools.
Can be configured by changing the values of these variables.
DEBUG = False
Enable this variable to activate debug features (like defined_at parameters). It can slow down Rebulk
LOG_LEVEL = 0
Default log level of generated rebulk logs.
"""
import inspect
import logging
import os
from collections import namedtuple
DEBUG = False
LOG_LEVEL = logging.DEBUG
class Frame(namedtuple('Frame', ['lineno', 'package', 'name', 'filename'])):
"""
Stack frame representation.
"""
__slots__ = ()
def __repr__(self):
return "%s#L%s" % (os.path.basename(self.filename), self.lineno)
def defined_at():
"""
Get definition location of a pattern or a match (outside of rebulk package).
:return:
:rtype:
"""
if DEBUG:
frame = inspect.currentframe()
while frame:
try:
if frame.f_globals['__package__'] != __package__:
break
except KeyError: # pragma:no cover
# If package is missing, consider we are in. Workaround for python 3.3.
break
frame = frame.f_back
ret = Frame(frame.f_lineno,
frame.f_globals.get('__package__'),
frame.f_globals.get('__name__'),
frame.f_code.co_filename)
del frame
return ret
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Debug tools.
Can be configured by changing the values of these variables.
DEBUG = False
Enable this variable to activate debug features (like defined_at parameters). It can slow down Rebulk.
LOG_LEVEL = 0
Default log level of generated rebulk logs.
"""
import inspect
import os
from collections import namedtuple
DEBUG = False
LOG_LEVEL = 0
class Frame(namedtuple('Frame', ['lineno', 'package', 'name', 'filename'])):
"""
Stack frame representation.
"""
__slots__ = ()
def __repr__(self):
return "%s#L%s" % (os.path.basename(self.filename), self.lineno)
def defined_at():
"""
Get definition location of a pattern or a match (outside of rebulk package).
:return:
:rtype:
"""
if DEBUG:
frame = inspect.currentframe()
while frame:
try:
if frame.f_globals['__package__'] != __package__:
break
except KeyError: # pragma:no cover
# If package is missing, consider we are in. Workaround for python 3.3.
break
frame = frame.f_back
ret = Frame(frame.f_lineno,
frame.f_globals.get('__package__'),
frame.f_globals.get('__name__'),
frame.f_code.co_filename)
del frame
return ret
<commit_msg>Set default LOG_LEVEL to logging.DEBUG<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Debug tools.
Can be configured by changing the values of these variables.
DEBUG = False
Enable this variable to activate debug features (like defined_at parameters). It can slow down Rebulk.
LOG_LEVEL = 0
Default log level of generated rebulk logs.
"""
import inspect
import logging
import os
from collections import namedtuple
DEBUG = False
LOG_LEVEL = logging.DEBUG
class Frame(namedtuple('Frame', ['lineno', 'package', 'name', 'filename'])):
"""
Stack frame representation.
"""
__slots__ = ()
def __repr__(self):
return "%s#L%s" % (os.path.basename(self.filename), self.lineno)
def defined_at():
"""
Get definition location of a pattern or a match (outside of rebulk package).
:return:
:rtype:
"""
if DEBUG:
frame = inspect.currentframe()
while frame:
try:
if frame.f_globals['__package__'] != __package__:
break
except KeyError: # pragma:no cover
# If package is missing, consider we are in. Workaround for python 3.3.
break
frame = frame.f_back
ret = Frame(frame.f_lineno,
frame.f_globals.get('__package__'),
frame.f_globals.get('__name__'),
frame.f_code.co_filename)
del frame
return ret
|
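Why the new default in the record above matters: 0 is logging.NOTSET, so messages emitted at that level fall below every standard threshold, while logging.DEBUG (numeric value 10) becomes visible as soon as debug logging is switched on. A minimal runnable sketch; the logger name is invented, not rebulk's:
import logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("rebulk.demo")  # hypothetical logger name
logger.log(0, "dropped: level 0 falls below the effective level")
logger.log(logging.DEBUG, "emitted: matches the configured level")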
d6b2dc137111e0a077625feefb0a2c70fc8e789b
|
Lib/__init__.py
|
Lib/__init__.py
|
"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy import *
del fft, ifft, info
import numpy
__all__.extend(filter(lambda x: x not in ['fft','ifft','info'], numpy.__all__))
del numpy
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
|
"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
|
Remove auto include of numpy namespace.
|
Remove auto include of numpy namespace.
|
Python
|
bsd-3-clause
|
mgaitan/scipy,rgommers/scipy,Srisai85/scipy,tylerjereddy/scipy,juliantaylor/scipy,sonnyhu/scipy,apbard/scipy,juliantaylor/scipy,zxsted/scipy,behzadnouri/scipy,mikebenfield/scipy,richardotis/scipy,nmayorov/scipy,pnedunuri/scipy,befelix/scipy,anntzer/scipy,mortada/scipy,chatcannon/scipy,WarrenWeckesser/scipy,mingwpy/scipy,aarchiba/scipy,vhaasteren/scipy,Newman101/scipy,josephcslater/scipy,maniteja123/scipy,richardotis/scipy,Srisai85/scipy,lhilt/scipy,ilayn/scipy,hainm/scipy,behzadnouri/scipy,njwilson23/scipy,scipy/scipy,fredrikw/scipy,grlee77/scipy,andyfaff/scipy,Eric89GXL/scipy,piyush0609/scipy,lhilt/scipy,lhilt/scipy,pnedunuri/scipy,jonycgn/scipy,WillieMaddox/scipy,Stefan-Endres/scipy,larsmans/scipy,petebachant/scipy,grlee77/scipy,scipy/scipy,nonhermitian/scipy,zaxliu/scipy,mortada/scipy,fernand/scipy,njwilson23/scipy,vberaudi/scipy,gdooper/scipy,niknow/scipy,tylerjereddy/scipy,ilayn/scipy,matthewalbani/scipy,anntzer/scipy,fernand/scipy,minhlongdo/scipy,FRidh/scipy,haudren/scipy,befelix/scipy,befelix/scipy,Shaswat27/scipy,jonycgn/scipy,ortylp/scipy,haudren/scipy,anielsen001/scipy,nonhermitian/scipy,haudren/scipy,gfyoung/scipy,minhlongdo/scipy,pizzathief/scipy,raoulbq/scipy,jseabold/scipy,zxsted/scipy,jamestwebber/scipy,befelix/scipy,vberaudi/scipy,matthewalbani/scipy,richardotis/scipy,ChanderG/scipy,jsilter/scipy,aarchiba/scipy,aeklant/scipy,haudren/scipy,WarrenWeckesser/scipy,aman-iitj/scipy,giorgiop/scipy,e-q/scipy,andim/scipy,sargas/scipy,mortonjt/scipy,futurulus/scipy,perimosocordiae/scipy,mortada/scipy,person142/scipy,gef756/scipy,matthew-brett/scipy,zerothi/scipy,zxsted/scipy,newemailjdm/scipy,gef756/scipy,Stefan-Endres/scipy,anielsen001/scipy,minhlongdo/scipy,mortonjt/scipy,jsilter/scipy,perimosocordiae/scipy,grlee77/scipy,argriffing/scipy,larsmans/scipy,futurulus/scipy,ales-erjavec/scipy,perimosocordiae/scipy,sargas/scipy,mingwpy/scipy,jsilter/scipy,Kamp9/scipy,jor-/scipy,vhaasteren/scipy,mingwpy/scipy,bkendzior/scipy,kalvdans/scipy,hainm/scipy,petebachant/scipy,Stefan-Endres/scipy,jonycgn/scipy,jakevdp/scipy,sriki18/scipy,ChanderG/scipy,mdhaber/scipy,zxsted/scipy,jjhelmus/scipy,minhlongdo/scipy,endolith/scipy,aman-iitj/scipy,sriki18/scipy,witcxc/scipy,minhlongdo/scipy,endolith/scipy,FRidh/scipy,nonhermitian/scipy,rgommers/scipy,Srisai85/scipy,mingwpy/scipy,surhudm/scipy,pschella/scipy,vigna/scipy,zxsted/scipy,larsmans/scipy,teoliphant/scipy,aeklant/scipy,bkendzior/scipy,pschella/scipy,sauliusl/scipy,gertingold/scipy,mortonjt/scipy,matthewalbani/scipy,mortonjt/scipy,endolith/scipy,piyush0609/scipy,pbrod/scipy,sriki18/scipy,niknow/scipy,FRidh/scipy,jor-/scipy,mtrbean/scipy,fernand/scipy,larsmans/scipy,surhudm/scipy,gef756/scipy,andyfaff/scipy,andyfaff/scipy,ogrisel/scipy,teoliphant/scipy,ogrisel/scipy,behzadnouri/scipy,giorgiop/scipy,Eric89GXL/scipy,zerothi/scipy,gertingold/scipy,jjhelmus/scipy,ilayn/scipy,zerothi/scipy,pbrod/scipy,zxsted/scipy,jor-/scipy,efiring/scipy,dominicelse/scipy,matthew-brett/scipy,anntzer/scipy,aeklant/scipy,felipebetancur/scipy,larsmans/scipy,Shaswat27/scipy,chatcannon/scipy,felipebetancur/scipy,gfyoung/scipy,mikebenfield/scipy,matthew-brett/scipy,andim/scipy,matthewalbani/scipy,jor-/scipy,Stefan-Endres/scipy,jsilter/scipy,haudren/scipy,tylerjereddy/scipy,pnedunuri/scipy,aman-iitj/scipy,gdooper/scipy,perimosocordiae/scipy,pbrod/scipy,dominicelse/scipy,pnedunuri/scipy,mdhaber/scipy,sauliusl/scipy,Eric89GXL/scipy,felipebetancur/scipy,maciejkula/scipy,kleskjr/scipy,mgaitan/scipy,zaxliu/scipy,kleskjr/scipy,Gillu13/scipy,WarrenWeckesser/scipy,fernand/scipy,vigna
/scipy,FRidh/scipy,Srisai85/scipy,mgaitan/scipy,raoulbq/scipy,grlee77/scipy,jjhelmus/scipy,nvoron23/scipy,e-q/scipy,efiring/scipy,giorgiop/scipy,cpaulik/scipy,argriffing/scipy,andim/scipy,petebachant/scipy,vberaudi/scipy,ortylp/scipy,pyramania/scipy,behzadnouri/scipy,cpaulik/scipy,hainm/scipy,giorgiop/scipy,ales-erjavec/scipy,kleskjr/scipy,jamestwebber/scipy,vberaudi/scipy,mdhaber/scipy,ales-erjavec/scipy,nmayorov/scipy,anntzer/scipy,jseabold/scipy,jonycgn/scipy,ortylp/scipy,andim/scipy,pizzathief/scipy,ChanderG/scipy,pnedunuri/scipy,ndchorley/scipy,vanpact/scipy,Newman101/scipy,pyramania/scipy,newemailjdm/scipy,mhogg/scipy,jseabold/scipy,richardotis/scipy,niknow/scipy,futurulus/scipy,gef756/scipy,Dapid/scipy,trankmichael/scipy,behzadnouri/scipy,niknow/scipy,aeklant/scipy,aarchiba/scipy,niknow/scipy,tylerjereddy/scipy,giorgiop/scipy,njwilson23/scipy,dominicelse/scipy,mortonjt/scipy,mdhaber/scipy,sonnyhu/scipy,vanpact/scipy,rmcgibbo/scipy,jakevdp/scipy,maniteja123/scipy,kalvdans/scipy,zerothi/scipy,andim/scipy,mtrbean/scipy,lukauskas/scipy,maniteja123/scipy,Gillu13/scipy,lukauskas/scipy,WillieMaddox/scipy,larsmans/scipy,kleskjr/scipy,mhogg/scipy,sargas/scipy,person142/scipy,sriki18/scipy,arokem/scipy,zaxliu/scipy,rmcgibbo/scipy,mortonjt/scipy,zaxliu/scipy,Eric89GXL/scipy,mgaitan/scipy,petebachant/scipy,cpaulik/scipy,fredrikw/scipy,matthewalbani/scipy,endolith/scipy,mikebenfield/scipy,ndchorley/scipy,sonnyhu/scipy,pbrod/scipy,ogrisel/scipy,jor-/scipy,witcxc/scipy,newemailjdm/scipy,trankmichael/scipy,arokem/scipy,jakevdp/scipy,sauliusl/scipy,petebachant/scipy,nmayorov/scipy,juliantaylor/scipy,minhlongdo/scipy,bkendzior/scipy,andyfaff/scipy,raoulbq/scipy,rgommers/scipy,raoulbq/scipy,mtrbean/scipy,efiring/scipy,Dapid/scipy,maniteja123/scipy,behzadnouri/scipy,andyfaff/scipy,anntzer/scipy,dch312/scipy,jonycgn/scipy,gdooper/scipy,witcxc/scipy,Gillu13/scipy,richardotis/scipy,vhaasteren/scipy,Gillu13/scipy,chatcannon/scipy,endolith/scipy,rmcgibbo/scipy,efiring/scipy,Gillu13/scipy,person142/scipy,chatcannon/scipy,ndchorley/scipy,felipebetancur/scipy,anielsen001/scipy,pizzathief/scipy,gertingold/scipy,WillieMaddox/scipy,mortada/scipy,aarchiba/scipy,anielsen001/scipy,dominicelse/scipy,kalvdans/scipy,josephcslater/scipy,jakevdp/scipy,newemailjdm/scipy,richardotis/scipy,mtrbean/scipy,petebachant/scipy,fredrikw/scipy,trankmichael/scipy,ogrisel/scipy,njwilson23/scipy,sonnyhu/scipy,zerothi/scipy,tylerjereddy/scipy,surhudm/scipy,ndchorley/scipy,Dapid/scipy,piyush0609/scipy,argriffing/scipy,mhogg/scipy,pbrod/scipy,sriki18/scipy,person142/scipy,ales-erjavec/scipy,jamestwebber/scipy,lukauskas/scipy,lukauskas/scipy,arokem/scipy,Shaswat27/scipy,WillieMaddox/scipy,ilayn/scipy,rmcgibbo/scipy,lukauskas/scipy,nvoron23/scipy,sauliusl/scipy,ndchorley/scipy,Shaswat27/scipy,ilayn/scipy,arokem/scipy,piyush0609/scipy,felipebetancur/scipy,ilayn/scipy,gertingold/scipy,scipy/scipy,rmcgibbo/scipy,bkendzior/scipy,sargas/scipy,woodscn/scipy,sauliusl/scipy,Dapid/scipy,gdooper/scipy,ales-erjavec/scipy,pyramania/scipy,gfyoung/scipy,Kamp9/scipy,Eric89GXL/scipy,fredrikw/scipy,dominicelse/scipy,endolith/scipy,woodscn/scipy,pizzathief/scipy,mdhaber/scipy,mhogg/scipy,surhudm/scipy,WarrenWeckesser/scipy,dch312/scipy,maniteja123/scipy,dch312/scipy,sargas/scipy,fredrikw/scipy,nvoron23/scipy,jamestwebber/scipy,sriki18/scipy,gdooper/scipy,kleskjr/scipy,argriffing/scipy,njwilson23/scipy,apbard/scipy,lhilt/scipy,futurulus/scipy,mgaitan/scipy,cpaulik/scipy,e-q/scipy,Newman101/scipy,anielsen001/scipy,jjhelmus/scipy,zaxliu/scipy,apbard/scipy,Warre
nWeckesser/scipy,scipy/scipy,pbrod/scipy,jakevdp/scipy,trankmichael/scipy,mtrbean/scipy,person142/scipy,maciejkula/scipy,Dapid/scipy,rgommers/scipy,niknow/scipy,Kamp9/scipy,josephcslater/scipy,hainm/scipy,woodscn/scipy,WarrenWeckesser/scipy,raoulbq/scipy,mikebenfield/scipy,perimosocordiae/scipy,trankmichael/scipy,nvoron23/scipy,cpaulik/scipy,pschella/scipy,apbard/scipy,hainm/scipy,giorgiop/scipy,bkendzior/scipy,sonnyhu/scipy,jsilter/scipy,kleskjr/scipy,vigna/scipy,aman-iitj/scipy,efiring/scipy,mortada/scipy,vberaudi/scipy,vigna/scipy,jjhelmus/scipy,matthew-brett/scipy,Srisai85/scipy,gertingold/scipy,trankmichael/scipy,Kamp9/scipy,zerothi/scipy,pschella/scipy,Newman101/scipy,gfyoung/scipy,kalvdans/scipy,surhudm/scipy,Eric89GXL/scipy,kalvdans/scipy,maciejkula/scipy,teoliphant/scipy,ortylp/scipy,e-q/scipy,ChanderG/scipy,aarchiba/scipy,grlee77/scipy,perimosocordiae/scipy,haudren/scipy,ortylp/scipy,vhaasteren/scipy,nmayorov/scipy,vanpact/scipy,efiring/scipy,Stefan-Endres/scipy,njwilson23/scipy,pizzathief/scipy,mortada/scipy,teoliphant/scipy,maniteja123/scipy,ChanderG/scipy,josephcslater/scipy,nvoron23/scipy,teoliphant/scipy,nvoron23/scipy,Kamp9/scipy,ChanderG/scipy,Gillu13/scipy,lukauskas/scipy,WillieMaddox/scipy,fernand/scipy,rgommers/scipy,lhilt/scipy,jseabold/scipy,woodscn/scipy,piyush0609/scipy,rmcgibbo/scipy,FRidh/scipy,chatcannon/scipy,gef756/scipy,maciejkula/scipy,zaxliu/scipy,Dapid/scipy,pyramania/scipy,vanpact/scipy,Shaswat27/scipy,vanpact/scipy,vanpact/scipy,josephcslater/scipy,jseabold/scipy,witcxc/scipy,jonycgn/scipy,argriffing/scipy,juliantaylor/scipy,ortylp/scipy,mingwpy/scipy,fernand/scipy,pschella/scipy,pyramania/scipy,futurulus/scipy,nmayorov/scipy,vhaasteren/scipy,Srisai85/scipy,ogrisel/scipy,juliantaylor/scipy,newemailjdm/scipy,pnedunuri/scipy,argriffing/scipy,mgaitan/scipy,vhaasteren/scipy,dch312/scipy,nonhermitian/scipy,woodscn/scipy,raoulbq/scipy,Shaswat27/scipy,sauliusl/scipy,apbard/scipy,aman-iitj/scipy,jseabold/scipy,ales-erjavec/scipy,ndchorley/scipy,aeklant/scipy,aman-iitj/scipy,witcxc/scipy,scipy/scipy,vberaudi/scipy,piyush0609/scipy,chatcannon/scipy,surhudm/scipy,matthew-brett/scipy,newemailjdm/scipy,e-q/scipy,mingwpy/scipy,maciejkula/scipy,arokem/scipy,gef756/scipy,mhogg/scipy,FRidh/scipy,jamestwebber/scipy,nonhermitian/scipy,Kamp9/scipy,Stefan-Endres/scipy,fredrikw/scipy,gfyoung/scipy,befelix/scipy,Newman101/scipy,cpaulik/scipy,mtrbean/scipy,Newman101/scipy,andyfaff/scipy,vigna/scipy,futurulus/scipy,felipebetancur/scipy,scipy/scipy,anntzer/scipy,andim/scipy,mikebenfield/scipy,woodscn/scipy,anielsen001/scipy,WillieMaddox/scipy,mdhaber/scipy,mhogg/scipy,sonnyhu/scipy,hainm/scipy,dch312/scipy
|
"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy import *
del fft, ifft, info
import numpy
__all__.extend(filter(lambda x: x not in ['fft','ifft','info'], numpy.__all__))
del numpy
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
Remove auto include of numpy namespace.
|
"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
|
<commit_before>"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy import *
del fft, ifft, info
import numpy
__all__.extend(filter(lambda x: x not in ['fft','ifft','info'], numpy.__all__))
del numpy
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
<commit_msg>Remove auto include of numpy namespace.<commit_after>
|
"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
|
"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy import *
del fft, ifft, info
import numpy
__all__.extend(filter(lambda x: x not in ['fft','ifft','info'], numpy.__all__))
del numpy
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
Remove auto include of numpy namespace."""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
|
<commit_before>"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy import *
del fft, ifft, info
import numpy
__all__.extend(filter(lambda x: x not in ['fft','ifft','info'], numpy.__all__))
del numpy
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
<commit_msg>Remove auto include of numpy namespace.<commit_after>"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
|
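The caller-facing consequence of the change above, sketched under the assumption that downstream code previously leaned on the re-exported names: numpy symbols must now be imported explicitly instead of being picked up from the scipy namespace.
import numpy as np
a = np.array([1.0, 2.0, 3.0])  # previously reachable via the star import
print(np.fft.fft(a))           # fft/ifft/info were special-cased; now plain numpy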
56c25218cb3c987201839917930fc1ae791b5601
|
reg/__init__.py
|
reg/__init__.py
|
# flake8: noqa
from .dispatch import dispatch, Dispatch
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
|
# flake8: noqa
from .dispatch import dispatch, Dispatch, LookupEntry
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
|
Add LookupEntry to the API.
|
Add LookupEntry to the API.
|
Python
|
bsd-3-clause
|
morepath/reg,taschini/reg
|
# flake8: noqa
from .dispatch import dispatch, Dispatch
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
Add LookupEntry to the API.
|
# flake8: noqa
from .dispatch import dispatch, Dispatch, LookupEntry
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
|
<commit_before># flake8: noqa
from .dispatch import dispatch, Dispatch
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
<commit_msg>Add LookupEntry to the API.<commit_after>
|
# flake8: noqa
from .dispatch import dispatch, Dispatch, LookupEntry
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
|
# flake8: noqa
from .dispatch import dispatch, Dispatch
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
Add LookupEntry to the API.# flake8: noqa
from .dispatch import dispatch, Dispatch, LookupEntry
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
|
<commit_before># flake8: noqa
from .dispatch import dispatch, Dispatch
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
<commit_msg>Add LookupEntry to the API.<commit_after># flake8: noqa
from .dispatch import dispatch, Dispatch, LookupEntry
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
|
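The visible effect of the one-line diff above, checked with nothing beyond the imports the diff itself shows: LookupEntry is now importable from the package root as well as from its defining module.
from reg import LookupEntry                     # new public import path
from reg.dispatch import LookupEntry as _impl   # where __init__ pulls it from
assert LookupEntry is _impl                     # same object, two paths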
00a5d82c99ce6fb7096d432f12959ab4d8218f4f
|
booster_bdd/features/src/importBooster.py
|
booster_bdd/features/src/importBooster.py
|
import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.environ.get('PROJECT_NAME')
pipeline = os.environ.get('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print 'Starting test.....'
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
print 'Making request to import...'
r = requests.post('https://forge.api.openshift.io/api/osio/import',
headers=headers, data=data)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
|
import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.getenv('PROJECT_NAME')
pipeline = os.getenv('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print('Starting test.....')
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
forgeApi = os.getenv("FORGE_API")
print('Making request to import...')
r = requests.post(
'{}/api/osio/import'.format(forgeApi),
headers=headers,
data=data
)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
|
Replace hardcoded Forge API URL by variable.
|
booster-bdd: Replace hardcoded Forge API URL by variable.
|
Python
|
apache-2.0
|
ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test
|
import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.environ.get('PROJECT_NAME')
pipeline = os.environ.get('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print 'Starting test.....'
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
print 'Making request to import...'
r = requests.post('https://forge.api.openshift.io/api/osio/import',
headers=headers, data=data)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
booster-bdd: Replace hardcoded Forge API URL by variable.
|
import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.getenv('PROJECT_NAME')
pipeline = os.getenv('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print('Starting test.....')
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
forgeApi = os.getenv("FORGE_API")
print('Making request to import...')
r = requests.post(
'{}/api/osio/import'.format(forgeApi),
headers=headers,
data=data
)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
|
<commit_before>import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.environ.get('PROJECT_NAME')
pipeline = os.environ.get('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print 'Starting test.....'
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
print 'Making request to import...'
r = requests.post('https://forge.api.openshift.io/api/osio/import',
headers=headers, data=data)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
<commit_msg>booster-bdd: Replace hardcoded Forge API URL by variable.<commit_after>
|
import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.getenv('PROJECT_NAME')
pipeline = os.getenv('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print('Starting test.....')
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
forgeApi = os.getenv("FORGE_API")
print('Making request to import...')
r = requests.post(
'{}/api/osio/import'.format(forgeApi),
headers=headers,
data=data
)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
|
import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.environ.get('PROJECT_NAME')
pipeline = os.environ.get('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print 'Starting test.....'
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
print 'Making request to import...'
r = requests.post('https://forge.api.openshift.io/api/osio/import',
headers=headers, data=data)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
booster-bdd: Replace hardcoded Forge API URL by variable.import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.getenv('PROJECT_NAME')
pipeline = os.getenv('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print('Starting test.....')
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
forgeApi = os.getenv("FORGE_API")
print('Making request to import...')
r = requests.post(
'{}/api/osio/import'.format(forgeApi),
headers=headers,
data=data
)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
|
<commit_before>import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.environ.get('PROJECT_NAME')
pipeline = os.environ.get('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print 'Starting test.....'
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
print 'Making request to import...'
r = requests.post('https://forge.api.openshift.io/api/osio/import',
headers=headers, data=data)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
<commit_msg>booster-bdd: Replace hardcoded Forge API URL by variable.<commit_after>import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.getenv('PROJECT_NAME')
pipeline = os.getenv('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print('Starting test.....')
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
forgeApi = os.getenv("FORGE_API")
print('Making request to import...')
r = requests.post(
'{}/api/osio/import'.format(forgeApi),
headers=headers,
data=data
)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
|
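The configuration contract introduced above, sketched with the old hardcoded endpoint reused only as an illustrative default: the Forge base URL is now read from the FORGE_API environment variable at call time.
import os
os.environ.setdefault("FORGE_API", "https://forge.api.openshift.io")  # example default
forge_api = os.getenv("FORGE_API")
print("{}/api/osio/import".format(forge_api))  # URL the test will POST to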
c9e4a05ed2677fd569642e0ef77dd9f63bf3e15f
|
vumi/persist/tests/test_redis_base.py
|
vumi/persist/tests/test_redis_base.py
|
"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client, key_prefix='test'):
return Manager(client, key_prefix)
def test_sub_manager(self):
dummy_client = object()
manager = self.mk_manager(dummy_client)
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, dummy_client)
|
"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client=None, key_prefix='test'):
if client is None:
client = object()
return Manager(client, key_prefix)
def test_sub_manager(self):
manager = self.mk_manager()
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, manager._client)
self.assertEqual(sub_manager._key_separator, manager._key_separator)
|
Make sub_manager test neater and also check key_separator.
|
Make sub_manager test neater and also check key_separator.
|
Python
|
bsd-3-clause
|
TouK/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi
|
"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client, key_prefix='test'):
return Manager(client, key_prefix)
def test_sub_manager(self):
dummy_client = object()
manager = self.mk_manager(dummy_client)
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, dummy_client)
Make sub_manager test neater and also check key_separator.
|
"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client=None, key_prefix='test'):
if client is None:
client = object()
return Manager(client, key_prefix)
def test_sub_manager(self):
manager = self.mk_manager()
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, manager._client)
self.assertEqual(sub_manager._key_separator, manager._key_separator)
|
<commit_before>"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client, key_prefix='test'):
return Manager(client, key_prefix)
def test_sub_manager(self):
dummy_client = object()
manager = self.mk_manager(dummy_client)
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, dummy_client)
<commit_msg>Make sub_manager test neater and also check key_separator.<commit_after>
|
"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client=None, key_prefix='test'):
if client is None:
client = object()
return Manager(client, key_prefix)
def test_sub_manager(self):
manager = self.mk_manager()
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, manager._client)
self.assertEqual(sub_manager._key_separator, manager._key_separator)
|
"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client, key_prefix='test'):
return Manager(client, key_prefix)
def test_sub_manager(self):
dummy_client = object()
manager = self.mk_manager(dummy_client)
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, dummy_client)
Make sub_manager test neater and also check key_separator."""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client=None, key_prefix='test'):
if client is None:
client = object()
return Manager(client, key_prefix)
def test_sub_manager(self):
manager = self.mk_manager()
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, manager._client)
self.assertEqual(sub_manager._key_separator, manager._key_separator)
|
<commit_before>"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client, key_prefix='test'):
return Manager(client, key_prefix)
def test_sub_manager(self):
dummy_client = object()
manager = self.mk_manager(dummy_client)
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, dummy_client)
<commit_msg>Make sub_manager test neater and also check key_separator.<commit_after>"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client=None, key_prefix='test'):
if client is None:
client = object()
return Manager(client, key_prefix)
def test_sub_manager(self):
manager = self.mk_manager()
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, manager._client)
self.assertEqual(sub_manager._key_separator, manager._key_separator)
|
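The test-helper pattern applied in the record above, reduced to a generic sketch with placeholder names rather than vumi classes: defaulting the client argument keeps most call sites to one line while an explicit dependency remains possible.
def mk_helper(client=None, key_prefix='test'):
    if client is None:
        client = object()  # dummy stand-in, as in the rewritten test
    return client, key_prefix
client, prefix = mk_helper()    # common case needs no boilerplate
mk_helper(client)               # an explicit client is still accepted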
1a5e55e1a0354182a2b23cd51292cb1cd3c3a88d
|
mrburns/main/context_processors.py
|
mrburns/main/context_processors.py
|
from django.conf import settings
def glow_variables(request):
return {
'MAP_DATA_URL': settings.MAP_DATA_URL,
'OG_ABS_URL': 'https://{}{}'.format(request.get_host(),
request.path)
}
|
from django.conf import settings
def glow_variables(request):
return {
'MAP_DATA_URL': settings.MAP_DATA_URL,
'OG_ABS_URL': u'https://{}{}'.format(request.get_host(),
request.path)
}
|
Handle unicode paths in glow_variables
|
Handle unicode paths in glow_variables
|
Python
|
mpl-2.0
|
mozilla/mrburns,mozilla/mrburns,mozilla/mrburns
|
from django.conf import settings
def glow_variables(request):
return {
'MAP_DATA_URL': settings.MAP_DATA_URL,
'OG_ABS_URL': 'https://{}{}'.format(request.get_host(),
request.path)
}
Handle unicode paths in glow_variables
|
from django.conf import settings
def glow_variables(request):
return {
'MAP_DATA_URL': settings.MAP_DATA_URL,
'OG_ABS_URL': u'https://{}{}'.format(request.get_host(),
request.path)
}
|
<commit_before>from django.conf import settings
def glow_variables(request):
return {
'MAP_DATA_URL': settings.MAP_DATA_URL,
'OG_ABS_URL': 'https://{}{}'.format(request.get_host(),
request.path)
}
<commit_msg>Handle unicode paths in glow_variables<commit_after>
|
from django.conf import settings
def glow_variables(request):
return {
'MAP_DATA_URL': settings.MAP_DATA_URL,
'OG_ABS_URL': u'https://{}{}'.format(request.get_host(),
request.path)
}
|
from django.conf import settings
def glow_variables(request):
return {
'MAP_DATA_URL': settings.MAP_DATA_URL,
'OG_ABS_URL': 'https://{}{}'.format(request.get_host(),
request.path)
}
Handle unicode paths in glow_variablesfrom django.conf import settings
def glow_variables(request):
return {
'MAP_DATA_URL': settings.MAP_DATA_URL,
'OG_ABS_URL': u'https://{}{}'.format(request.get_host(),
request.path)
}
|
<commit_before>from django.conf import settings
def glow_variables(request):
return {
'MAP_DATA_URL': settings.MAP_DATA_URL,
'OG_ABS_URL': 'https://{}{}'.format(request.get_host(),
request.path)
}
<commit_msg>Handle unicode paths in glow_variables<commit_after>from django.conf import settings
def glow_variables(request):
return {
'MAP_DATA_URL': settings.MAP_DATA_URL,
'OG_ABS_URL': u'https://{}{}'.format(request.get_host(),
request.path)
}
|
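A Python 2 illustration of the str/unicode subtlety behind the one-character fix above; host and path are invented values, and request.path is assumed to be text: the u'' template guarantees a unicode result whatever the argument types, so template code downstream never receives an unexpected byte string.
# -*- coding: utf-8 -*-
host = 'example.com'              # byte string, as get_host() may return
path = u'/caf\xe9'                # non-ASCII text path
url = u'https://{}{}'.format(host, path)
assert isinstance(url, unicode)   # Python 2: result is always text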
ce28d39244b75ee0dd865017b4cf1a0125bf4887
|
ynr/apps/parties/serializers.py
|
ynr/apps/parties/serializers.py
|
from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name")
|
from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name", "legacy_slug")
|
Add legacy slug to embedded Party on memberships
|
Add legacy slug to embedded Party on memberships
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name")
Add legacy slug to embedded Party on memberships
|
from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name", "legacy_slug")
|
<commit_before>from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name")
<commit_msg>Add legacy slug to embedded Party on memberships<commit_after>
|
from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name", "legacy_slug")
|
from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name")
Add legacy slug to embedded Party on memberships
from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name", "legacy_slug")
|
<commit_before>from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name")
<commit_msg>Add legacy slug to embedded Party on memberships<commit_after>from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name", "legacy_slug")
|
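A side note on the pattern in the record above: MinimalPartySerializer narrows the embedded Party representation by subclassing the full serializer and overriding Meta.fields; DRF only insists that fields declared directly on a class appear in its fields option, so the nested serializers inherited from PartySerializer are simply dropped. A minimal sketch of the same pattern, using a hypothetical Widget model (illustrative only, not part of the commit):
from rest_framework import serializers
from myapp.models import Widget  # hypothetical model, stands in for Party
class FullWidgetSerializer(serializers.ModelSerializer):
    class Meta:
        model = Widget
        fields = ("id", "name", "slug", "owner")
class MinimalWidgetSerializer(FullWidgetSerializer):
    # Reuse the parent's configuration but trim the payload for embedded use
    class Meta(FullWidgetSerializer.Meta):
        fields = ("id", "name", "slug")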
c37abb2849dc3c4b885673220f9f9965109f0be6
|
sieve/sieve.py
|
sieve/sieve.py
|
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
|
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
not_prime.update(range(i*i, n+1, i))
yield i
|
Revert back to a generator - it's actually slightly faster
|
Revert back to a generator - it's actually slightly faster
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
Revert back to a generator - it's actually slightly faster
|
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
not_prime.update(range(i*i, n+1, i))
yield i
|
<commit_before>def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
<commit_msg>Revert back to a generator - it's actually slightly faster<commit_after>
|
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
not_prime.update(range(i*i, n+1, i))
yield i
|
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
Revert back to a generator - it's actually slightly faster
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
not_prime.update(range(i*i, n+1, i))
yield i
|
<commit_before>def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
<commit_msg>Revert back to a generator - it's actually slightly faster<commit_after>def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
not_prime.update(range(i*i, n+1, i))
yield i
|
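One caveat on the generator version recorded above: under PEP 479 (opt-in since Python 3.5, mandatory from 3.7), a StopIteration raised inside a generator body is converted to RuntimeError instead of silently ending iteration, so the n < 2 guard would crash on modern interpreters. A PEP 479-safe sketch of the same sieve, assuming nothing beyond the commit's own logic:
def primes(n):
    if n < 2:
        return  # a bare return ends the generator cleanly (PEP 479-safe)
    yield 2
    not_prime = set()  # composite numbers marked so far
    for i in range(3, n + 1, 2):
        if i not in not_prime:
            not_prime.update(range(i * i, n + 1, i))
            yield i
def sieve(n):
    return list(primes(n))
print(sieve(30))  # [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]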
2127e3adf190736e14f8500753ffc58126cb39f4
|
ovp_search/tests/test_execution.py
|
ovp_search/tests/test_execution.py
|
import ovp_search.apps
|
import ovp_search.apps
from django.test import TestCase
from django.core.management import call_command
class RebuildIndexTestCase(TestCase):
def test_rebuild_index_execution(self):
call_command('rebuild_index', '--noinput', verbosity=0)
|
Add test case for index rebuilding
|
Add test case for index rebuilding
|
Python
|
agpl-3.0
|
OpenVolunteeringPlatform/django-ovp-search
|
import ovp_search.apps
Add test case for index rebuilding
|
import ovp_search.apps
from django.test import TestCase
from django.core.management import call_command
class RebuildIndexTestCase(TestCase):
def test_rebuild_index_execution(self):
call_command('rebuild_index', '--noinput', verbosity=0)
|
<commit_before>import ovp_search.apps
<commit_msg>Add test case for index rebuilding<commit_after>
|
import ovp_search.apps
from django.test import TestCase
from django.core.management import call_command
class RebuildIndexTestCase(TestCase):
def test_rebuild_index_execution(self):
call_command('rebuild_index', '--noinput', verbosity=0)
|
import ovp_search.apps
Add test case for index rebuilding
import ovp_search.apps
from django.test import TestCase
from django.core.management import call_command
class RebuildIndexTestCase(TestCase):
def test_rebuild_index_execution(self):
call_command('rebuild_index', '--noinput', verbosity=0)
|
<commit_before>import ovp_search.apps
<commit_msg>Add test case for index rebuilding<commit_after>import ovp_search.apps
from django.test import TestCase
from django.core.management import call_command
class RebuildIndexTestCase(TestCase):
def test_rebuild_index_execution(self):
call_command('rebuild_index', '--noinput', verbosity=0)
|
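The test above exercises a Haystack management command through Django's call_command, the standard way to drive commands from a TestCase. A small sketch of the same pattern that also captures command output, using Django's built-in check command (hypothetical test, not part of the commit):
from io import StringIO
from django.core.management import call_command
from django.test import TestCase
class CommandSmokeTestCase(TestCase):
    def test_check_runs_cleanly(self):
        out = StringIO()
        call_command('check', stdout=out)  # 'check' ships with Django core
        self.assertIn('no issues', out.getvalue().lower())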
e23d5a64cfd5604f74cce583db3366f2cabb5e1f
|
tests/basics/builtin_minmax.py
|
tests/basics/builtin_minmax.py
|
# test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
|
# test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
# 'default' tests
print(min([1, 2, 3, 4, 5], default=-1))
print(min([], default=-1))
print(max([1, 2, 3, 4, 5], default=-1))
print(max([], default=-1))
|
Add min/max "default" argument test
|
tests: Add min/max "default" argument test
|
Python
|
mit
|
adafruit/circuitpython,adafruit/micropython,adamkh/micropython,alex-march/micropython,alex-robbins/micropython,AriZuu/micropython,blazewicz/micropython,bvernoux/micropython,chrisdearman/micropython,cwyark/micropython,danicampora/micropython,deshipu/micropython,dinau/micropython,dmazzella/micropython,drrk/micropython,dxxb/micropython,EcmaXp/micropython,emfcamp/micropython,ernesto-g/micropython,galenhz/micropython,ganshun666/micropython,HenrikSolver/micropython,henriknelson/micropython,hiway/micropython,hosaka/micropython,infinnovation/micropython,jmarcelino/pycom-micropython,kerneltask/micropython,lowRISC/micropython,martinribelotta/micropython,matthewelse/micropython,mhoffma/micropython,mianos/micropython,micropython/micropython-esp32,misterdanb/micropython,mpalomer/micropython,MrSurly/micropython,MrSurly/micropython-esp32,neilh10/micropython,oopy/micropython,PappaPeppar/micropython,Peetz0r/micropython-esp32,pfalcon/micropython,pozetroninc/micropython,praemdonck/micropython,pramasoul/micropython,puuu/micropython,redbear/micropython,ryannathans/micropython,selste/micropython,SHA2017-badge/micropython-esp32,supergis/micropython,swegener/micropython,TDAbboud/micropython,Timmenem/micropython,tobbad/micropython,toolmacher/micropython,torwag/micropython,tralamazza/micropython,trezor/micropython,tuc-osg/micropython,turbinenreiter/micropython,xhat/micropython
|
# test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
tests: Add min/max "default" argument test
|
# test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
# 'default' tests
print(min([1, 2, 3, 4, 5], default=-1))
print(min([], default=-1))
print(max([1, 2, 3, 4, 5], default=-1))
print(max([], default=-1))
|
<commit_before># test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
<commit_msg>tests: Add min/max "default" argument test<commit_after>
|
# test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
# 'default' tests
print(min([1, 2, 3, 4, 5], default=-1))
print(min([], default=-1))
print(max([1, 2, 3, 4, 5], default=-1))
print(max([], default=-1))
|
# test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
tests: Add min/max "default" argument test
# test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
# 'default' tests
print(min([1, 2, 3, 4, 5], default=-1))
print(min([], default=-1))
print(max([1, 2, 3, 4, 5], default=-1))
print(max([], default=-1))
|
<commit_before># test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
<commit_msg>tests: Add min/max "default" argument test<commit_after># test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
# 'default' tests
print(min([1, 2, 3, 4, 5], default=-1))
print(min([], default=-1))
print(max([1, 2, 3, 4, 5], default=-1))
print(max([], default=-1))
|
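Two semantics the new assertions lean on, worth spelling out: default is returned only when the iterable is empty, sidestepping the ValueError exercised earlier, and CPython rejects default alongside multiple positional arguments. A short sketch combining default with key under those CPython rules:
print(min([], key=abs, default=0))          # 0: default wins on an empty iterable
print(max([3, -7, 5], key=abs, default=0))  # -7: key is applied when items exist
try:
    max(1, 2, default=0)  # default plus multiple positional args raises TypeError
except TypeError:
    print('TypeError')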
d90d91906981a4393810069b494d68230f17439e
|
frameworks/Scala/spray/setup.py
|
frameworks/Scala/spray/setup.py
|
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("../sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile)
subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
time.sleep(5)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'spray-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
pass
return 0
|
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("$FWROOT/sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile)
subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'spray-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
pass
return 0
|
Enable spray to find sbt
|
Enable spray to find sbt
|
Python
|
bsd-3-clause
|
actframework/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,denkab/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,grob/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,herloct/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,joshk/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,khellang/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,methane/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sgml/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,testn/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,torhve/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Verber/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zloster/FrameworkBenchmarks
|
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("../sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile)
subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
time.sleep(5)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'spray-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
pass
return 0
Enable spray to find sbt
|
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("$FWROOT/sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile)
subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'spray-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
pass
return 0
|
<commit_before>
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("../sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile)
subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
time.sleep(5)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'spray-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
pass
return 0
<commit_msg>Enable spray to find sbt<commit_after>
|
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("$FWROOT/sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile)
subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'spray-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
pass
return 0
|
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("../sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile)
subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
time.sleep(5)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'spray-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
pass
return 0
Enable spray to find sbt
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("$FWROOT/sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile)
subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'spray-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
pass
return 0
|
<commit_before>
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("../sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile)
subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
time.sleep(5)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'spray-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
pass
return 0
<commit_msg>Enable spray to find sbt<commit_after>
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.check_call('"..\\sbt\\sbt.bat" assembly', shell=True, cwd="spray", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("$FWROOT/sbt/sbt assembly", shell=True, cwd="spray", stderr=errfile, stdout=logfile)
subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'spray-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
pass
return 0
|
6551c882745b13d5b9be183e83f379e34b067921
|
tests/test_emailharvesterws.py
|
tests/test_emailharvesterws.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
import pytest
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
print(emails_found)
|
Revert "Fix a codacy issue"
|
Revert "Fix a codacy issue"
This reverts commit 0fe83f1bfa54eda16c42fb5d81b33215dc3ba562.
|
Python
|
mit
|
avidot/Botanick
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
Revert "Fix a codacy issue"
This reverts commit 0fe83f1bfa54eda16c42fb5d81b33215dc3ba562.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
import pytest
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
print(emails_found)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
<commit_msg>Revert "Fix a codacy issue"
This reverts commit 0fe83f1bfa54eda16c42fb5d81b33215dc3ba562.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
import pytest
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
print(emails_found)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
Revert "Fix a codacy issue"
This reverts commit 0fe83f1bfa54eda16c42fb5d81b33215dc3ba562.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
import pytest
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
print(emails_found)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
<commit_msg>Revert "Fix a codacy issue"
This reverts commit 0fe83f1bfa54eda16c42fb5d81b33215dc3ba562.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
import pytest
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
print(emails_found)
|
2666eee0a59581c504b36acd618e256cf313c377
|
start_server.py
|
start_server.py
|
import os
def start_server():
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
|
import os
from time import sleep
def start_server():
    # Attempt the remote server launch up to three times.
    count = 0
    while count < 3:
        send_ssh_server_start(count)
        count += 1
    exit()
def send_ssh_server_start(count):
try:
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
except:
sleep(count + 1)
|
Add logic to try server 3 times, pausing a little more each time.
|
Add logic to try server 3 times, pausing a little more each time.
|
Python
|
mit
|
jwarshaw/RaspberryDrive
|
import os
def start_server():
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
Add logic to try server 3 times, pausing a little more each time.
|
import os
from time import sleep
def start_server():
    # Attempt the remote server launch up to three times.
    count = 0
    while count < 3:
        send_ssh_server_start(count)
        count += 1
    exit()
def send_ssh_server_start(count):
try:
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
except:
sleep(count + 1)
|
<commit_before>import os
def start_server():
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
<commit_msg>Add logic to try server 3 times, pausing a little more each time.<commit_after>
|
import os
from time import sleep
def start_server():
    # Attempt the remote server launch up to three times.
    count = 0
    while count < 3:
        send_ssh_server_start(count)
        count += 1
    exit()
def send_ssh_server_start(count):
try:
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
except:
sleep(count + 1)
|
import os
def start_server():
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
Add logic to try server 3 times, pausing a little more each time.import os
from time import sleep
def start_server():
    # Attempt the remote server launch up to three times.
    count = 0
    while count < 3:
        send_ssh_server_start(count)
        count += 1
    exit()
def send_ssh_server_start(count):
try:
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
except:
sleep(count + 1)
|
<commit_before>import os
def start_server():
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
<commit_msg>Add logic to try server 3 times, pausing a little more each time.<commit_after>import os
from time import sleep
def start_server():
    # Attempt the remote server launch up to three times.
    count = 0
    while count < 3:
        send_ssh_server_start(count)
        count += 1
    exit()
def send_ssh_server_start(count):
try:
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
except:
sleep(count + 1)
|
2849499a076b6997f8e3c7b76103df94f50ac6c3
|
python/src/setup.py
|
python/src/setup.py
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.15.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.16.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
Increment PyPI package to 1.9.16.1
|
Increment PyPI package to 1.9.16.1
|
Python
|
apache-2.0
|
talele08/appengine-mapreduce,talele08/appengine-mapreduce,soundofjw/appengine-mapreduce,westerhofffl/appengine-mapreduce,westerhofffl/appengine-mapreduce,GoogleCloudPlatform/appengine-mapreduce,westerhofffl/appengine-mapreduce,soundofjw/appengine-mapreduce,soundofjw/appengine-mapreduce,VirusTotal/appengine-mapreduce,Candreas/mapreduce,potatolondon/potato-mapreduce,bmenasha/appengine-mapreduce,talele08/appengine-mapreduce,vendasta/appengine-mapreduce,lordzuko/appengine-mapreduce,aozarov/appengine-mapreduce,rbruyere/appengine-mapreduce,ankit318/appengine-mapreduce,bmenasha/appengine-mapreduce,VirusTotal/appengine-mapreduce,bmenasha/appengine-mapreduce,rbruyere/appengine-mapreduce,GoogleCloudPlatform/appengine-mapreduce,westerhofffl/appengine-mapreduce,chargrizzle/appengine-mapreduce,rbruyere/appengine-mapreduce,lordzuko/appengine-mapreduce,VirusTotal/appengine-mapreduce,vendasta/appengine-mapreduce,chargrizzle/appengine-mapreduce,GoogleCloudPlatform/appengine-mapreduce,Candreas/mapreduce,chargrizzle/appengine-mapreduce,lordzuko/appengine-mapreduce,bmenasha/appengine-mapreduce,aozarov/appengine-mapreduce,mikelambert/appengine-mapreduce,lordzuko/appengine-mapreduce,vendasta/appengine-mapreduce,mikelambert/appengine-mapreduce,chargrizzle/appengine-mapreduce,westerhofffl/appengine-mapreduce,GoogleCloudPlatform/appengine-mapreduce,rbruyere/appengine-mapreduce,GoogleCloudPlatform/appengine-mapreduce,potatolondon/potato-mapreduce,vendasta/appengine-mapreduce,aozarov/appengine-mapreduce,rbruyere/appengine-mapreduce,vendasta/appengine-mapreduce,bmenasha/appengine-mapreduce,potatolondon/potato-mapreduce,ankit318/appengine-mapreduce,mikelambert/appengine-mapreduce,Candreas/mapreduce,talele08/appengine-mapreduce,talele08/appengine-mapreduce,lordzuko/appengine-mapreduce,Candreas/mapreduce,mikelambert/appengine-mapreduce,chargrizzle/appengine-mapreduce,ankit318/appengine-mapreduce,aozarov/appengine-mapreduce,Candreas/mapreduce,aozarov/appengine-mapreduce,soundofjw/appengine-mapreduce,VirusTotal/appengine-mapreduce,ankit318/appengine-mapreduce,mikelambert/appengine-mapreduce,VirusTotal/appengine-mapreduce,soundofjw/appengine-mapreduce,ankit318/appengine-mapreduce
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.15.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
Increment PyPI package to 1.9.16.1
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.16.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
<commit_before>#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.15.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
<commit_msg>Increment PyPI package to 1.9.16.1<commit_after>
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.16.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.15.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
Increment PyPI package to 1.9.16.1#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.16.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
<commit_before>#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.15.0",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
<commit_msg>Increment PyPI package to 1.9.16.1<commit_after>#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing MR lib."""
import distribute_setup
# User may not have setuptools installed on their machines.
# This script will automatically install the right version from PyPI.
distribute_setup.use_setuptools()
# pylint: disable=g-import-not-at-top
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEngineMapReduce",
version="1.9.16.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine mapreduce data processing",
url="https://code.google.com/p/appengine-mapreduce/",
license="Apache License 2.0",
description=("Enable MapReduce style data processing on "
"App Engine"),
zip_safe=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.15",
"GoogleAppEnginePipeline >= 1.9.15",
"Graphy >= 1.0.0",
"simplejson >= 3.6.5",
"mock >= 1.0.1",
"mox >= 0.5.3",
]
)
|
72205981af062258c4cf75c4323aa3e4d2859bb8
|
pelicanconf.py
|
pelicanconf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = ()
# Social widget
SOCIAL = [
('calendar', '/archives.html'),
('tags', '/tags.html'),
('email', 'sio.wtf@gmail.com'),
('github', 'https://github.com/sio'),
]
DEFAULT_PAGINATION = 6
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
Replace default links and values
|
Replace default links and values
|
Python
|
apache-2.0
|
sio/potyarkin.ml,sio/potyarkin.ml
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
Replace default links and values
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = ()
# Social widget
SOCIAL = [
('calendar', '/archives.html'),
('tags', '/tags.html'),
('email', 'sio.wtf@gmail.com'),
('github', 'https://github.com/sio'),
]
DEFAULT_PAGINATION = 6
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
<commit_msg>Replace default links and values<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = ()
# Social widget
SOCIAL = [
('calendar', '/archives.html'),
('tags', '/tags.html'),
('email', 'sio.wtf@gmail.com'),
('github', 'https://github.com/sio'),
]
DEFAULT_PAGINATION = 6
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
Replace default links and values#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = ()
# Social widget
SOCIAL = [
('calendar', '/archives.html'),
('tags', '/tags.html'),
('email', 'sio.wtf@gmail.com'),
('github', 'https://github.com/sio'),
]
DEFAULT_PAGINATION = 6
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
<commit_msg>Replace default links and values<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = ()
# Social widget
SOCIAL = [
('calendar', '/archives.html'),
('tags', '/tags.html'),
('email', 'sio.wtf@gmail.com'),
('github', 'https://github.com/sio'),
]
DEFAULT_PAGINATION = 6
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
857124a12f10e3954c114c2b6b688857b80a77a5
|
Spectrum.py
|
Spectrum.py
|
#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
test = Spectrum()
print(test)
print(test.pixel, test.flux)
test.pixel = x
test.flux = y
print(test)
print(test.pixel, test.flux, test.wavelength)
test2 = Spectrum(x, flux=y)
print(test2)
print(test2.pixel, test.flux)
print(test2.wavelength)
|
#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
|
Remove simple testing from inside class module
|
Remove simple testing from inside class module
|
Python
|
mit
|
jason-neal/spectrum_overload,jason-neal/spectrum_overload,jason-neal/spectrum_overload
|
#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
test = Spectrum()
print(test)
print(test.pixel, test.flux)
test.pixel = x
test.flux = y
print(test)
print(test.pixel, test.flux, test.wavelength)
test2 = Spectrum(x, flux=y)
print(test2)
print(test2.pixel, test.flux)
print(test2.wavelength)Remove simple testing from inside class module
|
#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
|
<commit_before>#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
test = Spectrum()
print(test)
print(test.pixel, test.flux)
test.pixel = x
test.flux = y
print(test)
print(test.pixel, test.flux, test.wavelength)
test2 = Spectrum(x, flux=y)
print(test2)
print(test2.pixel, test.flux)
print(test2.wavelength)<commit_msg>Remove simple testing from inside class module<commit_after>
|
#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
|
#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
test = Spectrum()
print(test)
print(test.pixel, test.flux)
test.pixel = x
test.flux = y
print(test)
print(test.pixel, test.flux, test.wavelength)
test2 = Spectrum(x, flux=y)
print(test2)
print(test2.pixel, test.flux)
print(test2.wavelength)Remove simple testing from inside class module#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
|
<commit_before>#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
test = Spectrum()
print(test)
print(test.pixel, test.flux)
test.pixel = x
test.flux = y
print(test)
print(test.pixel, test.flux, test.wavelength)
test2 = Spectrum(x, flux=y)
print(test2)
print(test2.pixel, test.flux)
print(test2.wavelength)<commit_msg>Remove simple testing from inside class module<commit_after>#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
|
530f67493ba0d044a0896aff39bdab2ea5f1cf15
|
__init__.py
|
__init__.py
|
from openerp.osv import orm
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
|
from openerp.osv import orm
from openerp.osv import osv
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
|
Add osv method from openerp
|
Add osv method from openerp
|
Python
|
agpl-3.0
|
xcgd/oemetasl
|
from openerp.osv import orm
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
Add osv method from openerp
|
from openerp.osv import orm
from openerp.osv import osv
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
|
<commit_before>from openerp.osv import orm
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
<commit_msg>Add osv method from openerp<commit_after>
|
from openerp.osv import orm
from openerp.osv import osv
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
|
from openerp.osv import orm
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
Add osv method from openerpfrom openerp.osv import orm
from openerp.osv import osv
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
|
<commit_before>from openerp.osv import orm
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
<commit_msg>Add osv method from openerp<commit_after>from openerp.osv import orm
from openerp.osv import osv
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
|
709017ea46cd3784983ef0ee64cfe608aa44cf0c
|
tests/integration/aiohttp_utils.py
|
tests/integration/aiohttp_utils.py
|
import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json() # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
|
import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', encoding='utf-8', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json(encoding=encoding) # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
|
Fix aiohttp utils to pass encoding to response.json
|
Fix aiohttp utils to pass encoding to response.json
|
Python
|
mit
|
graingert/vcrpy,graingert/vcrpy,kevin1024/vcrpy,kevin1024/vcrpy
|
import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json() # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
Fix aiohttp utils to pass encoding to response.json
|
import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', encoding='utf-8', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json(encoding=encoding) # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
|
<commit_before>import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json() # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
<commit_msg>Fix aiohttp utils to pass encoding to response.json<commit_after>
|
import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', encoding='utf-8', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json(encoding=encoding) # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
|
import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json() # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
Fix aiohttp utils to pass encoding to response.jsonimport asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', encoding='utf-8', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json(encoding=encoding) # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
|
<commit_before>import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json() # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
<commit_msg>Fix aiohttp utils to pass encondig to response.json<commit_after>import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', encoding='utf-8', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json(encoding=encoding) # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
|
2a3fe3b5e08c91ab8d77569b02b36da63909f619
|
pysnmp/hlapi/v1arch/asyncore/sync/__init__.py
|
pysnmp/hlapi/v1arch/asyncore/sync/__init__.py
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
try:
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
except SyntaxError:
from pysnmp.hlapi.v1arch.asyncore.sync.compat.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.compat.ntforg import *
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
|
Remove the remnants of hlapi.v1arch.asyncore.sync.compat
|
Remove the remnants of hlapi.v1arch.asyncore.sync.compat
|
Python
|
bsd-2-clause
|
etingof/pysnmp,etingof/pysnmp
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
try:
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
except SyntaxError:
from pysnmp.hlapi.v1arch.asyncore.sync.compat.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.compat.ntforg import *
Remove the remnants of hlapi.v1arch.asyncore.sync.compat
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
|
<commit_before>#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
try:
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
except SyntaxError:
from pysnmp.hlapi.v1arch.asyncore.sync.compat.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.compat.ntforg import *
<commit_msg>Remove the remnants of hlapi.v1arch.asyncore.sync.compat<commit_after>
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
try:
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
except SyntaxError:
from pysnmp.hlapi.v1arch.asyncore.sync.compat.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.compat.ntforg import *
Remove the remnants of hlapi.v1arch.asyncore.sync.compat#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
|
<commit_before>#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
try:
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
except SyntaxError:
from pysnmp.hlapi.v1arch.asyncore.sync.compat.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.compat.ntforg import *
<commit_msg>Remove the remnants of hlapi.v1arch.asyncore.sync.compat<commit_after>#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
|
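For context, the dropped fallback relied on the fact that importing a module whose source uses unsupported syntax raises SyntaxError at import time; a sketch of that pattern, with invented module names:

try:
    from mypkg.modern import helper  # hypothetical module using newer syntax
except SyntaxError:
    from mypkg.legacy import helper  # hypothetical fallback for old interpreters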
674721b9b094fe7e63d3356cf76e7eec0cb9bb62
|
employees/serializers.py
|
employees/serializers.py
|
from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
|
from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score',
'last_month_score',
'current_month_score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
|
Add current_month_score and last_month_score to EmployeeListSerializer
|
Add current_month_score and last_month_score to EmployeeListSerializer
|
Python
|
apache-2.0
|
belatrix/BackendAllStars
|
from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
Add current_month_score and last_month_score to EmployeeListSerializer
|
from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score',
'last_month_score',
'current_month_score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
|
<commit_before>from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
<commit_msg>Add current_month_score and last_month_score to EmployeeListSerializer<commit_after>
|
from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score',
'last_month_score',
'current_month_score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
|
from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
Add current_month_score and last_month_score to EmployeeListSerializerfrom .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score',
'last_month_score',
'current_month_score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
|
<commit_before>from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
<commit_msg>Add current_month_score and last_month_score to EmployeeListSerializer<commit_after>from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score',
'last_month_score',
'current_month_score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
|
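A hedged sketch of the effect on list payloads, assuming a saved Employee row exists; only the model and serializer from the snippet above are used.

employee = Employee.objects.first()
data = EmployeeListSerializer(employee).data
# The two monthly counters now ride along with the running total in lists.
print(data['score'], data['last_month_score'], data['current_month_score'])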
25e7b4a2e297e9944b5065851c6e65eb40b11bcd
|
scripts/examples/OpenMV/99-Tests/unittests.py
|
scripts/examples/OpenMV/99-Tests/unittests.py
|
# OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, passed):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + ("PASSED" if passed == True else "FAILED"))
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_passed = True
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_passed = False
print_result(test, test_passed)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
|
# OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, result):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + result)
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_result = "PASSED"
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_result = "DISABLED" if "unavailable" in str(e) else "FAILED"
print_result(test, test_result)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
|
Update unittest to ignore disabled functions.
|
Update unittest to ignore disabled functions.
|
Python
|
mit
|
kwagyeman/openmv,kwagyeman/openmv,iabdalkader/openmv,kwagyeman/openmv,iabdalkader/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv,openmv/openmv,openmv/openmv,iabdalkader/openmv,openmv/openmv
|
# OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, passed):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + ("PASSED" if passed == True else "FAILED"))
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_passed = True
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_passed = False
print_result(test, test_passed)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
Update unittest to ignore disabled functions.
|
# OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, result):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + result)
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_result = "PASSED"
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_result = "DISABLED" if "unavailable" in str(e) else "FAILED"
print_result(test, test_result)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
|
<commit_before># OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, passed):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + ("PASSED" if passed == True else "FAILED"))
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_passed = True
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_passed = False
print_result(test, test_passed)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
<commit_msg>Update unittest to ignore disabled functions.<commit_after>
|
# OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, result):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + result)
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_result = "PASSED"
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_result = "DISABLED" if "unavailable" in str(e) else "FAILED"
print_result(test, test_result)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
|
# OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, passed):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + ("PASSED" if passed == True else "FAILED"))
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_passed = True
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_passed = False
print_result(test, test_passed)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
Update unittest to ignore disabled functions.# OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, result):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + result)
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_result = "PASSED"
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_result = "DISABLED" if "unavailable" in str(e) else "FAILED"
print_result(test, test_result)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
|
<commit_before># OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, passed):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + ("PASSED" if passed == True else "FAILED"))
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_passed = True
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_passed = False
print_result(test, test_passed)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
<commit_msg>Update unittest to ignore disabled functions.<commit_after># OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, result):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + result)
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_result = "PASSED"
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_result = "DISABLED" if "unavailable" in str(e) else "FAILED"
print_result(test, test_result)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
|
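A hypothetical test stub showing how a script opts into the new DISABLED status; the runner checks for the substring "unavailable" in the exception message.

def unittest(data_path, temp_path):
    # Raising with "unavailable" in the message prints DISABLED, not FAILED.
    raise Exception('sensor unavailable on this board')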
de228621deb5637ab0698ca23cf63ece46c5ddee
|
task/views.py
|
task/views.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
queryset = Task.objects.all()
serializer_class = TaskSerializer
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from django.db.models import Q
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
serializer_class = TaskSerializer
def get_queryset(self):
"""
This view should return a list of all the tasks
for the currently logged-in user.
"""
user = self.request.user
task_object = Task.objects.filter(Q(created_by=user) | Q(assigned_to=user))
return task_object
|
Adjust the APIView queryset to return tasks created or assigned to the currently logged-in user
|
Adjust the APIView queryset to return tasks created or assigned to the currently logged-in user
|
Python
|
apache-2.0
|
toladata/TolaProfile,toladata/TolaProfile,toladata/TolaProfile,toladata/TolaProfile
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
queryset = Task.objects.all()
serializer_class = TaskSerializer
Adjust the APIView queryset to return tasks created or assigned to the currently logged-in user
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from django.db.models import Q
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
serializer_class = TaskSerializer
def get_queryset(self):
"""
This view should return a list of all the tasks
for the currently logged-in user.
"""
user = self.request.user
task_object = Task.objects.filter(Q(created_by=user) | Q(assigned_to=user))
return task_object
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
queryset = Task.objects.all()
serializer_class = TaskSerializer
<commit_msg>Adjust the APIView queryset to return tasks created or assigned to the currently logged-in user<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from django.db.models import Q
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
serializer_class = TaskSerializer
def get_queryset(self):
"""
This view should return a list of all the tasks
for the currently logged-in user.
"""
user = self.request.user
task_object = Task.objects.filter(Q(created_by=user) | Q(assigned_to=user))
return task_object
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
queryset = Task.objects.all()
serializer_class = TaskSerializer
Adjust the APIView queryset to return tasks created or assigned to the currently logged-in user# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from django.db.models import Q
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
serializer_class = TaskSerializer
def get_queryset(self):
"""
This view should return a list of all the tasks
for the currently logged-in user.
"""
user = self.request.user
task_object = Task.objects.filter(Q(created_by=user) | Q(assigned_to=user))
return task_object
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
queryset = Task.objects.all()
serializer_class = TaskSerializer
<commit_msg>Adjust the APIView queryset to return tasks created or assigned to the currently logged-in user<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from django.db.models import Q
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
serializer_class = TaskSerializer
def get_queryset(self):
"""
This view should return a list of all the tasks
for the currently logged-in user.
"""
user = self.request.user
task_object = Task.objects.filter(Q(created_by=user) | Q(assigned_to=user))
return task_object
|
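A sketch of what the Q-object filter expresses, with user standing in for an authenticated request.user; the OR is evaluated in a single query rather than two.

from django.db.models import Q

created = Task.objects.filter(created_by=user)      # tasks the user created
assigned = Task.objects.filter(assigned_to=user)    # tasks assigned to them
# One queryset, one WHERE ... OR ... clause, covering both sets above.
combined = Task.objects.filter(Q(created_by=user) | Q(assigned_to=user))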
da91f170c106c46a0d858e887220bc691066cdaa
|
tests/dtypes_test.py
|
tests/dtypes_test.py
|
from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
all_dtypes = [np.float64, np.float64, np.float64, np.float64, np.int64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=None), all_dtypes)
some_dtypes = [np.float64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=['x', 'mi', 'name', 'obj']), some_dtypes)
|
from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
assert (ds.dtypes.values == [ds[name].dtype for name in ds.column_names]).all()
|
Update of the dtypes unit-test.
|
Update of the dtypes unit-test.
|
Python
|
mit
|
maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex
|
from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
all_dtypes = [np.float64, np.float64, np.float64, np.float64, np.int64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=None), all_dtypes)
some_dtypes = [np.float64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=['x', 'mi', 'name', 'obj']), some_dtypes)
Update of the dtypes unit-test.
|
from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
assert (ds.dtypes.values == [ds[name].dtype for name in ds.column_names]).all()
|
<commit_before>from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
all_dtypes = [np.float64, np.float64, np.float64, np.float64, np.int64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=None), all_dtypes)
some_dtypes = [np.float64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=['x', 'mi', 'name', 'obj']), some_dtypes)
<commit_msg>Update of the dtypes unit-test.<commit_after>
|
from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
assert (ds.dtypes.values == [ds[name].dtype for name in ds.column_names]).all()
|
from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
all_dtypes = [np.float64, np.float64, np.float64, np.float64, np.int64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=None), all_dtypes)
some_dtypes = [np.float64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=['x', 'mi', 'name', 'obj']), some_dtypes)
Update of the dtypes unit-test.from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
assert (ds.dtypes.values == [ds[name].dtype for name in ds.column_names]).all()
|
<commit_before>from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
all_dtypes = [np.float64, np.float64, np.float64, np.float64, np.int64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=None), all_dtypes)
some_dtypes = [np.float64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=['x', 'mi', 'name', 'obj']), some_dtypes)
<commit_msg>Update of the dtypes unit-test.<commit_after>from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
assert (ds.dtypes.values == [ds[name].dtype for name in ds.column_names]).all()
|
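The rewritten test treats dtypes as a property rather than a method; a quick interactive sketch, assuming vaex's bundled example dataset is available:

import vaex

df = vaex.example()
# dtypes is now a property whose .values align with per-column dtypes.
assert (df.dtypes.values == [df[name].dtype for name in df.column_names]).all()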
be77248c56b71ca3c5240ec55676d08227a1f526
|
api/settings.py
|
api/settings.py
|
# -*- coding: utf-8 -*-
"""Application configuration."""
import os
class Config(object):
"""Base configuration."""
DEBUG = False
TESTING = False
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
# Local Settings
"""MONGODB_DB = os.environ.get('WHERENO_DB', 'whereno')
MONGODB_HOST = os.environ.get('WHERENO_HOST', 'localhost')
MONGODB_PORT = os.environ.get('WHERENO_PORT', 27017)
MONGODB_USERNAME = os.environ.get('WHERENO_USERNAME', 'whereno')
MONGODB_PASSWORD = os.environ.get('WHERENO_PASSWORD', 'whereno')"""
# Cloud Settings
MONGODB_DB = str(os.environ.get('MONGODB_DB'))
print(MONGODB_DB)
MONGODB_HOST = str(os.environ.get('MONGODB_HOST'))
print(MONGODB_HOST)
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
TESTING = False
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
TESTING = True
CACHE_TYPE = 'simple'
BCRYPT_LOG_ROUNDS = 4
class TestConfig(Config):
"""Test configuration."""
TESTING = True
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
|
# -*- coding: utf-8 -*-
"""Application configuration."""
import os
class Config(object):
"""Base configuration."""
DEBUG = False
TESTING = False
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
SECRET_KEY = str(os.environ.get('SECRET_KEY'))
# Local Settings
"""MONGODB_DB = os.environ.get('WHERENO_DB', 'whereno')
MONGODB_HOST = os.environ.get('WHERENO_HOST', 'localhost')
MONGODB_PORT = os.environ.get('WHERENO_PORT', 27017)
MONGODB_USERNAME = os.environ.get('WHERENO_USERNAME', 'whereno')
MONGODB_PASSWORD = os.environ.get('WHERENO_PASSWORD', 'whereno')"""
# Cloud Settings
MONGODB_DB = str(os.environ.get('MONGODB_DB'))
MONGODB_HOST = str(os.environ.get('MONGODB_HOST'))
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
TESTING = False
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
TESTING = True
CACHE_TYPE = 'simple'
BCRYPT_LOG_ROUNDS = 4
class TestConfig(Config):
"""Test configuration."""
TESTING = True
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
|
Add placeholder for secret key
|
Add placeholder for secret key
|
Python
|
mit
|
jaredmichaelsmith/grove,jaredmichaelsmith/grove
|
# -*- coding: utf-8 -*-
"""Application configuration."""
import os
class Config(object):
"""Base configuration."""
DEBUG = False
TESTING = False
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
# Local Settings
"""MONGODB_DB = os.environ.get('WHERENO_DB', 'whereno')
MONGODB_HOST = os.environ.get('WHERENO_HOST', 'localhost')
MONGODB_PORT = os.environ.get('WHERENO_PORT', 27017)
MONGODB_USERNAME = os.environ.get('WHERENO_USERNAME', 'whereno')
MONGODB_PASSWORD = os.environ.get('WHERENO_PASSWORD', 'whereno')"""
# Cloud Settings
MONGODB_DB = str(os.environ.get('MONGODB_DB'))
print(MONGODB_DB)
MONGODB_HOST = str(os.environ.get('MONGODB_HOST'))
print(MONGODB_HOST)
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
TESTING = False
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
TESTING = True
CACHE_TYPE = 'simple'
BCRYPT_LOG_ROUNDS = 4
class TestConfig(Config):
"""Test configuration."""
TESTING = True
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
Add placeholder for secret key
|
# -*- coding: utf-8 -*-
"""Application configuration."""
import os
class Config(object):
"""Base configuration."""
DEBUG = False
TESTING = False
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
SECRET_KEY = str(os.environ.get('SECRET_KEY'))
# Local Settings
"""MONGODB_DB = os.environ.get('WHERENO_DB', 'whereno')
MONGODB_HOST = os.environ.get('WHERENO_HOST', 'localhost')
MONGODB_PORT = os.environ.get('WHERENO_PORT', 27017)
MONGODB_USERNAME = os.environ.get('WHERENO_USERNAME', 'whereno')
MONGODB_PASSWORD = os.environ.get('WHERENO_PASSWORD', 'whereno')"""
# Cloud Settings
MONGODB_DB = str(os.environ.get('MONGODB_DB'))
MONGODB_HOST = str(os.environ.get('MONGODB_HOST'))
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
TESTING = False
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
TESTING = True
CACHE_TYPE = 'simple'
BCRYPT_LOG_ROUNDS = 4
class TestConfig(Config):
"""Test configuration."""
TESTING = True
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
|
<commit_before># -*- coding: utf-8 -*-
"""Application configuration."""
import os
class Config(object):
"""Base configuration."""
DEBUG = False
TESTING = False
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
# Local Settings
"""MONGODB_DB = os.environ.get('WHERENO_DB', 'whereno')
MONGODB_HOST = os.environ.get('WHERENO_HOST', 'localhost')
MONGODB_PORT = os.environ.get('WHERENO_PORT', 27017)
MONGODB_USERNAME = os.environ.get('WHERENO_USERNAME', 'whereno')
MONGODB_PASSWORD = os.environ.get('WHERENO_PASSWORD', 'whereno')"""
# Cloud Settings
MONGODB_DB = str(os.environ.get('MONGODB_DB'))
print(MONGODB_DB)
MONGODB_HOST = str(os.environ.get('MONGODB_HOST'))
print(MONGODB_HOST)
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
TESTING = False
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
TESTING = True
CACHE_TYPE = 'simple'
BCRYPT_LOG_ROUNDS = 4
class TestConfig(Config):
"""Test configuration."""
TESTING = True
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
<commit_msg>Add placeholder for secret key<commit_after>
|
# -*- coding: utf-8 -*-
"""Application configuration."""
import os
class Config(object):
"""Base configuration."""
DEBUG = False
TESTING = False
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
SECRET_KEY = str(os.environ.get('SECRET_KEY'))
# Local Settings
"""MONGODB_DB = os.environ.get('WHERENO_DB', 'whereno')
MONGODB_HOST = os.environ.get('WHERENO_HOST', 'localhost')
MONGODB_PORT = os.environ.get('WHERENO_PORT', 27017)
MONGODB_USERNAME = os.environ.get('WHERENO_USERNAME', 'whereno')
MONGODB_PASSWORD = os.environ.get('WHERENO_PASSWORD', 'whereno')"""
# Cloud Settings
MONGODB_DB = str(os.environ.get('MONGODB_DB'))
MONGODB_HOST = str(os.environ.get('MONGODB_HOST'))
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
TESTING = False
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
TESTING = True
CACHE_TYPE = 'simple'
BCRYPT_LOG_ROUNDS = 4
class TestConfig(Config):
"""Test configuration."""
TESTING = True
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
|
# -*- coding: utf-8 -*-
"""Application configuration."""
import os
class Config(object):
"""Base configuration."""
DEBUG = False
TESTING = False
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
# Local Settings
"""MONGODB_DB = os.environ.get('WHERENO_DB', 'whereno')
MONGODB_HOST = os.environ.get('WHERENO_HOST', 'localhost')
MONGODB_PORT = os.environ.get('WHERENO_PORT', 27017)
MONGODB_USERNAME = os.environ.get('WHERENO_USERNAME', 'whereno')
MONGODB_PASSWORD = os.environ.get('WHERENO_PASSWORD', 'whereno')"""
# Cloud Settings
MONGODB_DB = str(os.environ.get('MONGODB_DB'))
print(MONGODB_DB)
MONGODB_HOST = str(os.environ.get('MONGODB_HOST'))
print(MONGODB_HOST)
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
TESTING = False
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
TESTING = True
CACHE_TYPE = 'simple'
BCRYPT_LOG_ROUNDS = 4
class TestConfig(Config):
"""Test configuration."""
TESTING = True
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
Add placeholder for secret key# -*- coding: utf-8 -*-
"""Application configuration."""
import os
class Config(object):
"""Base configuration."""
DEBUG = False
TESTING = False
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
SECRET_KEY = str(os.environ.get('SECRET_KEY'))
# Local Settings
"""MONGODB_DB = os.environ.get('WHERENO_DB', 'whereno')
MONGODB_HOST = os.environ.get('WHERENO_HOST', 'localhost')
MONGODB_PORT = os.environ.get('WHERENO_PORT', 27017)
MONGODB_USERNAME = os.environ.get('WHERENO_USERNAME', 'whereno')
MONGODB_PASSWORD = os.environ.get('WHERENO_PASSWORD', 'whereno')"""
# Cloud Settings
MONGODB_DB = str(os.environ.get('MONGODB_DB'))
MONGODB_HOST = str(os.environ.get('MONGODB_HOST'))
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
TESTING = False
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
TESTING = True
CACHE_TYPE = 'simple'
BCRYPT_LOG_ROUNDS = 4
class TestConfig(Config):
"""Test configuration."""
TESTING = True
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
|
<commit_before># -*- coding: utf-8 -*-
"""Application configuration."""
import os
class Config(object):
"""Base configuration."""
DEBUG = False
TESTING = False
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
# Local Settings
"""MONGODB_DB = os.environ.get('WHERENO_DB', 'whereno')
MONGODB_HOST = os.environ.get('WHERENO_HOST', 'localhost')
MONGODB_PORT = os.environ.get('WHERENO_PORT', 27017)
MONGODB_USERNAME = os.environ.get('WHERENO_USERNAME', 'whereno')
MONGODB_PASSWORD = os.environ.get('WHERENO_PASSWORD', 'whereno')"""
# Cloud Settings
MONGODB_DB = str(os.environ.get('MONGODB_DB'))
print(MONGODB_DB)
MONGODB_HOST = str(os.environ.get('MONGODB_HOST'))
print(MONGODB_HOST)
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
TESTING = False
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
TESTING = True
CACHE_TYPE = 'simple'
BCRYPT_LOG_ROUNDS = 4
class TestConfig(Config):
"""Test configuration."""
TESTING = True
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
<commit_msg>Add placeholder for secret key<commit_after># -*- coding: utf-8 -*-
"""Application configuration."""
import os
class Config(object):
"""Base configuration."""
DEBUG = False
TESTING = False
APP_DIR = os.path.abspath(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))
SECRET_KEY = str(os.environ.get('SECRET_KEY'))
# Local Settings
"""MONGODB_DB = os.environ.get('WHERENO_DB', 'whereno')
MONGODB_HOST = os.environ.get('WHERENO_HOST', 'localhost')
MONGODB_PORT = os.environ.get('WHERENO_PORT', 27017)
MONGODB_USERNAME = os.environ.get('WHERENO_USERNAME', 'whereno')
MONGODB_PASSWORD = os.environ.get('WHERENO_PASSWORD', 'whereno')"""
# Cloud Settings
MONGODB_DB = str(os.environ.get('MONGODB_DB'))
MONGODB_HOST = str(os.environ.get('MONGODB_HOST'))
class ProdConfig(Config):
"""Production configuration."""
ENV = 'prod'
DEBUG = False
TESTING = False
class DevConfig(Config):
"""Development configuration."""
ENV = 'dev'
DEBUG = True
TESTING = True
CACHE_TYPE = 'simple'
BCRYPT_LOG_ROUNDS = 4
class TestConfig(Config):
"""Test configuration."""
TESTING = True
DEBUG = True
BCRYPT_LOG_ROUNDS = 4
|
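One caveat worth noting: str(os.environ.get('SECRET_KEY')) yields the literal string 'None' when the variable is unset, so the app would silently run with a bogus key. A defensive sketch, not part of the commit:

import os

SECRET_KEY = os.environ.get('SECRET_KEY')
if SECRET_KEY is None:
    # Fail fast instead of signing sessions with the accidental key 'None'.
    raise RuntimeError('SECRET_KEY environment variable is not set')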
877d90ff064b70a7ce861ed66082c4d703170fed
|
scripts/python/cgi-bin/resource.py
|
scripts/python/cgi-bin/resource.py
|
#!/usr/bin/env python
import datetime
import random
timestamp = datetime.datetime.now()
humidity = random.random()
temperature = random.random() * 100
print "Content-Type: application/json"
print
print """\
{"resource": "polling-resource",
"streams":
{
"temperature": {"value": %f, "timestamp": %s},
"humidity": {"value": %f, "timestamp": %s}
}
}
""" % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S"), humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S"))
|
#!/usr/bin/env python
import datetime
import random
timestamp = datetime.datetime.now()
humidity = random.random()
temperature = random.random() * 100
print "Content-Type: application/json"
print
print """\
{"resource": "polling-resource",
"streams":
{
"temperature": {"value": %f, "timestamp": "%s"},
"humidity": {"value": %f, "timestamp": "%s"}
}
}
""" % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S"), humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S"))
|
Update timestamp to be a string
|
Update timestamp to be a string
|
Python
|
apache-2.0
|
projectcs13/sensor-cloud,projectcs13/sensor-cloud,EricssonResearch/iot-framework-engine,EricssonResearch/iot-framework-engine,projectcs13/sensor-cloud,EricssonResearch/iot-framework-engine,EricssonResearch/iot-framework-engine
|
#!/usr/bin/env python
import datetime
import random
timestamp = datetime.datetime.now()
humidity = random.random()
temperature = random.random() * 100
print "Content-Type: application/json"
print
print """\
{"resource": "polling-resource",
"streams":
{
"temperature": {"value": %f, "timestamp": %s},
"humidity": {"value": %f, "timestamp": %s}
}
}
""" % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S"), humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S"))
Update timestamp to be a string
|
#!/usr/bin/env python
import datetime
import random
timestamp = datetime.datetime.now()
humidity = random.random()
temperature = random.random() * 100
print "Content-Type: application/json"
print
print """\
{"resource": "polling-resource",
"streams":
{
"temperature": {"value": %f, "timestamp": "%s"},
"humidity": {"value": %f, "timestamp": "%s"}
}
}
""" % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S"), humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S"))
|
<commit_before>#!/usr/bin/env python
import datetime
import random
timestamp = datetime.datetime.now()
humidity = random.random()
temperature = random.random() * 100
print "Content-Type: application/json"
print
print """\
{"resource": "polling-resource",
"streams":
{
"temperature": {"value": %f, "timestamp": %s},
"humidity": {"value": %f, "timestamp": %s}
}
}
""" % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S"), humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S"))
<commit_msg>Update timestamp to be a string<commit_after>
|
#!/usr/bin/env python
import datetime
import random
timestamp = datetime.datetime.now()
humidity = random.random()
temperature = random.random() * 100
print "Content-Type: application/json"
print
print """\
{"resource": "polling-resource",
"streams":
{
"temperature": {"value": %f, "timestamp": "%s"},
"humidity": {"value": %f, "timestamp": "%s"}
}
}
""" % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S"), humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S"))
|
#!/usr/bin/env python
import datetime
import random
timestamp = datetime.datetime.now()
humidity = random.random()
temperature = random.random() * 100
print "Content-Type: application/json"
print
print """\
{"resource": "polling-resource",
"streams":
{
"temperature": {"value": %f, "timestamp": %s},
"humidity": {"value": %f, "timestamp": %s}
}
}
""" % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S"), humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S"))
Update timestamp to be a string#!/usr/bin/env python
import datetime
import random
timestamp = datetime.datetime.now()
humidity = random.random()
temperature = random.random() * 100
print "Content-Type: application/json"
print
print """\
{"resource": "polling-resource",
"streams":
{
"temperature": {"value": %f, "timestamp": "%s"},
"humidity": {"value": %f, "timestamp": "%s"}
}
}
""" % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S"), humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S"))
|
<commit_before>#!/usr/bin/env python
import datetime
import random
timestamp = datetime.datetime.now()
humidity = random.random()
temperature = random.random() * 100
print "Content-Type: application/json"
print
print """\
{"resource": "polling-resource",
"streams":
{
"temperature": {"value": %f, "timestamp": %s},
"humidity": {"value": %f, "timestamp": %s}
}
}
""" % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S"), humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S"))
<commit_msg>Update timestamp to be a string<commit_after>#!/usr/bin/env python
import datetime
import random
timestamp = datetime.datetime.now()
humidity = random.random()
temperature = random.random() * 100
print "Content-Type: application/json"
print
print """\
{"resource": "polling-resource",
"streams":
{
"temperature": {"value": %f, "timestamp": "%s"},
"humidity": {"value": %f, "timestamp": "%s"}
}
}
""" % (temperature, timestamp.strftime("%Y-%m-%d %H:%M:%S"), humidity, timestamp.strftime("%Y-%m-%d %H:%M:%S"))
|
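The quoting matters because a bare timestamp is not valid JSON; a quick check that runs on Python 2 or 3:

import json

json.loads('{"timestamp": "2013-11-07 12:00:00"}')  # parses fine
# json.loads('{"timestamp": 2013-11-07 12:00:00}')  # raises ValueError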
e55ccadb0954eaa66526dc6f112b5eac54a51ab3
|
calc/__init__.py
|
calc/__init__.py
|
"""
_
___ __ _ | | ___
/ __|/ _` || | / __|
| (__| (_| || || (__
\___|\__,_||_| \___|
Note:
The relative imports here (noted by the . prefix) are done as a convenience
so that the consumers of the ``calc`` package can directly use objects
belonging to the ``calc.calc`` module. Essentially this enables the consumer
to do
>>> from calc import INTEGER
instead of having to use the slightly longer
>>> from calc.calc import INTEGER
"""
__author__ = 'Reilly Tucker Siemens, Alex LordThorsen'
__email__ = 'reilly@tuckersiemens.com, alexlordthorsen@gmail.com'
__version__ = '0.1.0'
from .calc import Calc, CalcError
from .token import INTEGER, EOF, PLUS, Token
|
"""
_
___ __ _ | | ___
/ __|/ _` || | / __|
| (__| (_| || || (__
\___|\__,_||_| \___|
Rabbit hole:
The relative imports here (noted by the . prefix) are done as a convenience
so that the consumers of the ``calc`` package can directly use objects
belonging to the ``calc.calc`` module. Essentially this enables the consumer
to do
>>> from calc import INTEGER
instead of having to use the slightly longer
>>> from calc.calc import INTEGER
"""
__author__ = 'Reilly Tucker Siemens, Alex LordThorsen'
__email__ = 'reilly@tuckersiemens.com, alexlordthorsen@gmail.com'
__version__ = '0.1.0'
from .calc import Calc, CalcError
from .token import INTEGER, EOF, PLUS, Token
|
Mark relative imports as a rabbit hole discussion
|
Mark relative imports as a rabbit hole discussion
|
Python
|
isc
|
bike-barn/red-green-refactor
|
"""
_
___ __ _ | | ___
/ __|/ _` || | / __|
| (__| (_| || || (__
\___|\__,_||_| \___|
Note:
The relative imports here (noted by the . prefix) are done as a convenience
so that the consumers of the ``calc`` package can directly use objects
belonging to the ``calc.calc`` module. Essentially this enables the consumer
to do
>>> from calc import INTEGER
instead of having to use the slightly longer
>>> from calc.calc import INTEGER
"""
__author__ = 'Reilly Tucker Siemens, Alex LordThorsen'
__email__ = 'reilly@tuckersiemens.com, alexlordthorsen@gmail.com'
__version__ = '0.1.0'
from .calc import Calc, CalcError
from .token import INTEGER, EOF, PLUS, Token
Mark relative imports as a rabbit hole discussion
|
"""
_
___ __ _ | | ___
/ __|/ _` || | / __|
| (__| (_| || || (__
\___|\__,_||_| \___|
Rabbit hole:
The relative imports here (noted by the . prefix) are done as a convenience
so that the consumers of the ``calc`` package can directly use objects
belonging to the ``calc.calc`` module. Essentially this enables the consumer
to do
>>> from calc import INTEGER
instead of having to use the slightly longer
>>> from calc.calc import INTEGER
"""
__author__ = 'Reilly Tucker Siemens, Alex LordThorsen'
__email__ = 'reilly@tuckersiemens.com, alexlordthorsen@gmail.com'
__version__ = '0.1.0'
from .calc import Calc, CalcError
from .token import INTEGER, EOF, PLUS, Token
|
<commit_before>"""
_
___ __ _ | | ___
/ __|/ _` || | / __|
| (__| (_| || || (__
\___|\__,_||_| \___|
Note:
The relative imports here (noted by the . prefix) are done as a convenience
so that the consumers of the ``calc`` package can directly use objects
belonging to the ``calc.calc`` module. Essentially this enables the consumer
to do
>>> from calc import INTEGER
instead of having to use the slightly longer
>>> from calc.calc import INTEGER
"""
__author__ = 'Reilly Tucker Siemens, Alex LordThorsen'
__email__ = 'reilly@tuckersiemens.com, alexlordthorsen@gmail.com'
__version__ = '0.1.0'
from .calc import Calc, CalcError
from .token import INTEGER, EOF, PLUS, Token
<commit_msg>Mark relative imports as a rabbit hole discussion<commit_after>
|
"""
_
___ __ _ | | ___
/ __|/ _` || | / __|
| (__| (_| || || (__
\___|\__,_||_| \___|
Rabbit hole:
The relative imports here (noted by the . prefix) are done as a convenience
so that the consumers of the ``calc`` package can directly use objects
belonging to the ``calc.calc`` module. Essentially this enables the consumer
to do
>>> from calc import INTEGER
instead of having to use the slightly longer
>>> from calc.calc import INTEGER
"""
__author__ = 'Reilly Tucker Siemens, Alex LordThorsen'
__email__ = 'reilly@tuckersiemens.com, alexlordthorsen@gmail.com'
__version__ = '0.1.0'
from .calc import Calc, CalcError
from .token import INTEGER, EOF, PLUS, Token
|
"""
_
___ __ _ | | ___
/ __|/ _` || | / __|
| (__| (_| || || (__
\___|\__,_||_| \___|
Note:
The relative imports here (noted by the . prefix) are done as a convenience
so that the consumers of the ``calc`` package can directly use objects
belonging to the ``calc.calc`` module. Essentially this enables the consumer
to do
>>> from calc import INTEGER
instead of having to use the slightly longer
>>> from calc.calc import INTEGER
"""
__author__ = 'Reilly Tucker Siemens, Alex LordThorsen'
__email__ = 'reilly@tuckersiemens.com, alexlordthorsen@gmail.com'
__version__ = '0.1.0'
from .calc import Calc, CalcError
from .token import INTEGER, EOF, PLUS, Token
Mark relative imports as a rabbit hole discussion"""
_
___ __ _ | | ___
/ __|/ _` || | / __|
| (__| (_| || || (__
\___|\__,_||_| \___|
Rabbit hole:
The relative imports here (noted by the . prefix) are done as a convenience
so that the consumers of the ``calc`` package can directly use objects
belonging to the ``calc.calc`` module. Essentially this enables the consumer
to do
>>> from calc import INTEGER
instead of having to use the slightly longer
>>> from calc.calc import INTEGER
"""
__author__ = 'Reilly Tucker Siemens, Alex LordThorsen'
__email__ = 'reilly@tuckersiemens.com, alexlordthorsen@gmail.com'
__version__ = '0.1.0'
from .calc import Calc, CalcError
from .token import INTEGER, EOF, PLUS, Token
|
<commit_before>"""
_
___ __ _ | | ___
/ __|/ _` || | / __|
| (__| (_| || || (__
\___|\__,_||_| \___|
Note:
The relative imports here (noted by the . prefix) are done as a convenience
so that the consumers of the ``calc`` package can directly use objects
belonging to the ``calc.calc`` module. Essentially this enables the consumer
to do
>>> from calc import INTEGER
instead of having to use the slightly longer
>>> from calc.calc import INTEGER
"""
__author__ = 'Reilly Tucker Siemens, Alex LordThorsen'
__email__ = 'reilly@tuckersiemens.com, alexlordthorsen@gmail.com'
__version__ = '0.1.0'
from .calc import Calc, CalcError
from .token import INTEGER, EOF, PLUS, Token
<commit_msg>Mark relative imports as a rabbit hole discussion<commit_after>"""
_
___ __ _ | | ___
/ __|/ _` || | / __|
| (__| (_| || || (__
\___|\__,_||_| \___|
Rabbit hole:
The relative imports here (noted by the . prefix) are done as a convenience
so that the consumers of the ``calc`` package can directly use objects
belonging to the ``calc.calc`` module. Essentially this enables the consumer
to do
>>> from calc import INTEGER
instead of having to use the slightly longer
>>> from calc.calc import INTEGER
"""
__author__ = 'Reilly Tucker Siemens, Alex LordThorsen'
__email__ = 'reilly@tuckersiemens.com, alexlordthorsen@gmail.com'
__version__ = '0.1.0'
from .calc import Calc, CalcError
from .token import INTEGER, EOF, PLUS, Token
|
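For readers unfamiliar with the pattern, here is a minimal sketch of the package layout such re-exports assume (the file contents below are illustrative stand-ins, not the actual calc sources):

# calc/token.py -- defines the public names
INTEGER, EOF, PLUS = 'INTEGER', 'EOF', 'PLUS'
class Token(object):
    def __init__(self, type, value):
        self.type = type
        self.value = value

# calc/__init__.py -- re-exports them via a relative import
from .token import INTEGER, EOF, PLUS, Token

# client code -- can now use the shorter top-level form
from calc import INTEGER, Token
print(Token(INTEGER, 3).type)  # prints: INTEGER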
db2d8da9109ab4a8aa51acbd80abb2088a7fd299
|
campus02/urls.py
|
campus02/urls.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django.contrib.auth.urls')),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
|
Rearrange admin URL mount point.
|
Rearrange admin URL mount point.
|
Python
|
mit
|
fladi/django-campus02,fladi/django-campus02
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django.contrib.auth.urls')),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
Rearrange admin URL mount point.
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django.contrib.auth.urls')),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
<commit_msg>Rearrange admin URL mount point.<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django.contrib.auth.urls')),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
Rearrange admin URL mount point.#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django.contrib.auth.urls')),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
<commit_msg>Rearrange admin URL mount point.<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
|
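As background for the reordering above: Django resolves urlpatterns top to bottom and dispatches to the first pattern that can fully resolve the path, so moving the broad r'^' include ahead of r'^admin/' changes which URLconf answers any overlapping routes. A minimal self-contained illustration (the view functions are made up for this sketch):

from django.conf.urls import url
from django.http import HttpResponse

def catch_all(request):
    return HttpResponse("catch-all")

def admin_view(request):
    return HttpResponse("admin")

# r'^' matches every path, so with a plain view the admin pattern
# below is unreachable. An include() is more forgiving: it only
# shadows the paths its inner URLconf actually defines, because an
# unresolved include falls through to the next pattern.
urlpatterns = [
    url(r'^', catch_all),
    url(r'^admin/', admin_view),
]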
e112fedcc11ec12d1a669b47a223b4363eeb27e0
|
virustotal/server.py
|
virustotal/server.py
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from os.path import isfile
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
if not isfile(db):
return 'The database does not exist: "%s"' % db
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT count(*), CAST(0.999 + detected * 10 AS INT) || '0%' AS score'
'FROM virus WHERE detected >= 0 GROUP BY score ORDER BY score')
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from os.path import isfile
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
if not isfile(db):
return 'The database does not exist: "%s"' % db
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute("SELECT count(*), CAST(0.999 + detected * 10 AS INT) || '0%' AS score"
"FROM virus WHERE detected >= 0 GROUP BY score ORDER BY score")
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
Fix typo in previous commit
|
Fix typo in previous commit
|
Python
|
mit
|
enricobacis/playscraper
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from os.path import isfile
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
if not isfile(db):
return 'The database does not exist: "%s"' % db
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT count(*), CAST(0.999 + detected * 10 AS INT) || '0%' AS score'
'FROM virus WHERE detected >= 0 GROUP BY score ORDER BY score')
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
Fix typo in previous commit
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from os.path import isfile
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
if not isfile(db):
return 'The database does not exist: "%s"' % db
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute("SELECT count(*), CAST(0.999 + detected * 10 AS INT) || '0%' AS score"
"FROM virus WHERE detected >= 0 GROUP BY score ORDER BY score")
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
<commit_before>#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from os.path import isfile
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
if not isfile(db):
return 'The database does not exist: "%s"' % db
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT count(*), CAST(0.999 + detected * 10 AS INT) || '0%' AS score'
'FROM virus WHERE detected >= 0 GROUP BY score ORDER BY score')
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
<commit_msg>Fix typo in previous commit<commit_after>
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from os.path import isfile
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
if not isfile(db):
return 'The database does not exist: "%s"' % db
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute("SELECT count(*), CAST(0.999 + detected * 10 AS INT) || '0%' AS score"
"FROM virus WHERE detected >= 0 GROUP BY score ORDER BY score")
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from os.path import isfile
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
if not isfile(db):
return 'The database does not exist: "%s"' % db
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT count(*), CAST(0.999 + detected * 10 AS INT) || '0%' AS score'
'FROM virus WHERE detected >= 0 GROUP BY score ORDER BY score')
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
Fix typo in previous commit#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from os.path import isfile
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
if not isfile(db):
return 'The database does not exist: "%s"' % db
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute("SELECT count(*), CAST(0.999 + detected * 10 AS INT) || '0%' AS score"
"FROM virus WHERE detected >= 0 GROUP BY score ORDER BY score")
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
<commit_before>#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from os.path import isfile
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
if not isfile(db):
return 'The database does not exist: "%s"' % db
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute('SELECT count(*), CAST(0.999 + detected * 10 AS INT) || '0%' AS score'
'FROM virus WHERE detected >= 0 GROUP BY score ORDER BY score')
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
<commit_msg>Fix typo in previous commit<commit_after>#!/usr/bin/env python
from contextlib import closing
from sqlite3 import connect
from os.path import isfile
from bottle import request, template, route, run
@route('/virustotal/<db>')
def virus(db):
if not isfile(db):
return 'The database does not exist: "%s"' % db
with connect(db, timeout=10) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute("SELECT count(*), CAST(0.999 + detected * 10 AS INT) || '0%' AS score"
"FROM virus WHERE detected >= 0 GROUP BY score ORDER BY score")
return template('virustotal', title=db, cursor=cursor, refresh=request.query.refresh)
run(host='0.0.0.0')
|
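The bug and the fix both hinge on two Python string rules: adjacent string literals are concatenated at compile time, and a single-quoted literal cannot contain an unescaped single quote, which is why the SQL carrying '0%' had to switch to double-quoted literals. A small standalone illustration:

# Adjacent literals merge into one string at compile time; note that
# no whitespace is inserted, so any separating space must be written
# explicitly inside one of the literals.
query = ("SELECT 1 "
         "FROM t")
assert query == "SELECT 1 FROM t"

# Double-quoted literals can carry single quotes verbatim, which is
# handy for SQL string constants like '0%'.
fragment = "CAST(0.999 + x AS INT) || '0%' AS score"
assert "'0%'" in fragment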
8226571dc97230a486a3b59c8752411e038f04ee
|
openprescribing/matrixstore/tests/matrixstore_factory.py
|
openprescribing/matrixstore/tests/matrixstore_factory.py
|
import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
connection = sqlite3.connect(":memory:")
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
|
import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
# We need this connection to be sharable across threads because
# LiveServerTestCase runs in a separate thread from the main test code
connection = sqlite3.connect(":memory:", check_same_thread=False)
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
|
Fix MatrixStore test patching to work with LiveServerTestCase
|
Fix MatrixStore test patching to work with LiveServerTestCase
|
Python
|
mit
|
annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing
|
import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
connection = sqlite3.connect(":memory:")
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
Fix MatrixStore test patching to work with LiveServerTestCase
|
import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
# We need this connection to be sharable across threads because
# LiveServerTestCase runs in a separate thread from the main test code
connection = sqlite3.connect(":memory:", check_same_thread=False)
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
|
<commit_before>import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
connection = sqlite3.connect(":memory:")
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
<commit_msg>Fix MatrixStore test patching to work with LiveServerTestCase<commit_after>
|
import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
# We need this connection to be sharable across threads because
# LiveServerTestCase runs in a separate thread from the main test code
connection = sqlite3.connect(":memory:", check_same_thread=False)
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
|
import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
connection = sqlite3.connect(":memory:")
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
Fix MatrixStore test patching to work with LiveServerTestCaseimport mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
# We need this connection to be sharable across threads because
# LiveServerTestCase runs in a separate thread from the main test code
connection = sqlite3.connect(":memory:", check_same_thread=False)
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
|
<commit_before>import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
connection = sqlite3.connect(":memory:")
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
<commit_msg>Fix MatrixStore test patching to work with LiveServerTestCase<commit_after>import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
# We need this connection to be sharable across threads because
# LiveServerTestCase runs in a separate thread from the main test code
connection = sqlite3.connect(":memory:", check_same_thread=False)
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
|
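The check_same_thread flag matters because sqlite3 connections refuse, by default, to be used from any thread other than the one that created them; passing check_same_thread=False lifts that guard and leaves serialising access to the caller. A minimal sketch, independent of the MatrixStore code above:

import sqlite3
import threading

conn = sqlite3.connect(":memory:", check_same_thread=False)
conn.execute("CREATE TABLE t (n INTEGER)")

def writer():
    # Without check_same_thread=False this call would raise
    # sqlite3.ProgrammingError from the worker thread.
    conn.execute("INSERT INTO t VALUES (1)")

worker = threading.Thread(target=writer)
worker.start()
worker.join()
print(conn.execute("SELECT count(*) FROM t").fetchone())  # (1,)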
31ce7c5c264e7648427f73b51cd966165e63ec23
|
beaver/redis_transport.py
|
beaver/redis_transport.py
|
import datetime
import redis
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
|
import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self.redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
|
Allow for initial connection lag. Helpful when waiting for an SSH proxy to connect
|
Allow for initial connection lag. Helpful when waiting for an SSH proxy to connect
|
Python
|
mit
|
doghrim/python-beaver,Appdynamics/beaver,josegonzalez/python-beaver,doghrim/python-beaver,jlambert121/beaver,davidmoravek/python-beaver,josegonzalez/python-beaver,imacube/python-beaver,PierreF/beaver,zuazo-forks/beaver,zuazo-forks/beaver,thomasalrin/beaver,python-beaver/python-beaver,PierreF/beaver,rajmarndi/python-beaver,Open-Party/python-beaver,Appdynamics/beaver,Open-Party/python-beaver,davidmoravek/python-beaver,thomasalrin/beaver,imacube/python-beaver,timstoop/python-beaver,jlambert121/beaver,timstoop/python-beaver,rajmarndi/python-beaver,python-beaver/python-beaver
|
import datetime
import redis
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
Allow for initial connection lag. Helpful when waiting for an SSH proxy to connect
|
import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self.redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
|
<commit_before>import datetime
import redis
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
<commit_msg>Allow for initial connection lag. Helpful when waiting for an SSH proxy to connect<commit_after>
|
import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self.redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
|
import datetime
import redis
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
Allow for initial connection lag. Helpful when waiting for an SSH proxy to connectimport datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self.redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
|
<commit_before>import datetime
import redis
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
<commit_msg>Allow for initial connection lag. Helpful when waiting for an SSH proxy to connect<commit_after>import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self.redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
|
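The loop added above amounts to a bounded startup probe: up to 20 attempts, 0.1 s apart, each issuing a Redis PING and breaking out on the first success. The same idea reads a little more directly as a for loop (a sketch only; it assumes a server on localhost:6379 and keeps the record's attempt count and delay):

import time
import redis

client = redis.StrictRedis(host="localhost", port=6379, db=0)
for _ in range(20):
    time.sleep(0.1)
    try:
        client.ping()  # raises ConnectionError until the server is reachable
        break
    except redis.exceptions.ConnectionError:
        pass  # not up yet; retry after another short sleep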
597a2ec7a6ff0bae0b43a67e8be675017fd1d7f1
|
falafel/mappers/tests/test_current_clocksource.py
|
falafel/mappers/tests/test_current_clocksource.py
|
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
|
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
assert clksrc.is_kvm is False
assert clksrc.is_vmi_timer != clksrc.is_tsc
|
Enhance coverage of current_clocksource to 100%
|
Enhance coverage of current_clocksource to 100%
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
Enhance coverage of current_clocksource to 100%
|
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
assert clksrc.is_kvm is False
assert clksrc.is_vmi_timer != clksrc.is_tsc
|
<commit_before>from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
<commit_msg>Enhance coverage of current_clocksource to 100%<commit_after>
|
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
assert clksrc.is_kvm is False
assert clksrc.is_vmi_timer != clksrc.is_tsc
|
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
Enhance coverage of current_clocksource to 100%from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
assert clksrc.is_kvm is False
assert clksrc.is_vmi_timer != clksrc.is_tsc
|
<commit_before>from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
<commit_msg>Enhance coverage of current_clocksource to 100%<commit_after>from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
assert clksrc.is_kvm is False
assert clksrc.is_vmi_timer != clksrc.is_tsc
|
196fe935afd6adfec5d205e88472d7ef607b4743
|
checkout.py
|
checkout.py
|
__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s" % (self.name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(repo, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
("git clone %s"%repo),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: %s"%line)
loop = asyncio.get_event_loop()
task = async_exec(sys.argv[1], test_callback)
loop.run_until_complete(task)
|
__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s" % (self.name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(command, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
(command),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: '%s'"%line)
loop = asyncio.get_event_loop()
tasks = []
for command in sys.argv[1:]:
task = async_exec(command, test_callback)
tasks.append(task)
loop.run_until_complete(asyncio.wait(tasks))
# Test with
# PATH=$PWD/../bde-tools/bin:$PATH python3 ~/PycharmProjects/python_experiments/checkout.py \
# "export WAFLOCK=.waf-lock-dbg_exc_mt BDE_WAF_UFID=dbg_exc_mt; waf configure build --target=bsl" \
# "export WAFLOCK=.waf-lock-opt_exc_mt BDE_WAF_UFID=opt_exc_mt; waf configure build --target=bsl"
|
Add comment showing parallel waf invocation
|
Add comment showing parallel waf invocation
|
Python
|
apache-2.0
|
RMGiroux/python_experiments
|
__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s" % (self.name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(repo, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
("git clone %s"%repo),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: %s"%line)
loop = asyncio.get_event_loop()
task = async_exec(sys.argv[1], test_callback)
loop.run_until_complete(task)Add comment showing parallel waf invocation
|
__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s" % (self.name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(command, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
(command),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: '%s'"%line)
loop = asyncio.get_event_loop()
tasks = []
for command in sys.argv[1:]:
task = async_exec(command, test_callback)
tasks.append(task)
loop.run_until_complete(asyncio.wait(tasks))
# Test with
# PATH=$PWD/../bde-tools/bin:$PATH python3 ~/PycharmProjects/python_experiments/checkout.py \
# "export WAFLOCK=.waf-lock-dbg_exc_mt BDE_WAF_UFID=dbg_exc_mt; waf configure build --target=bsl" \
# "export WAFLOCK=.waf-lock-opt_exc_mt BDE_WAF_UFID=opt_exc_mt; waf configure build --target=bsl"
|
<commit_before>__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s" % (self.name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(repo, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
("git clone %s"%repo),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: %s"%line)
loop = asyncio.get_event_loop()
task = async_exec(sys.argv[1], test_callback)
loop.run_until_complete(task)<commit_msg>Add comment showing parallel waf invocation<commit_after>
|
__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s" % (self.name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(command, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
(command),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: '%s'"%line)
loop = asyncio.get_event_loop()
tasks = []
for command in sys.argv[1:]:
task = async_exec(command, test_callback)
tasks.append(task)
loop.run_until_complete(asyncio.wait(tasks))
# Test with
# PATH=$PWD/../bde-tools/bin:$PATH python3 ~/PycharmProjects/python_experiments/checkout.py \
# "export WAFLOCK=.waf-lock-dbg_exc_mt BDE_WAF_UFID=dbg_exc_mt; waf configure build --target=bsl" \
# "export WAFLOCK=.waf-lock-opt_exc_mt BDE_WAF_UFID=opt_exc_mt; waf configure build --target=bsl"
|
__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s" % (self.name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(repo, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
("git clone %s"%repo),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: %s"%line)
loop = asyncio.get_event_loop()
task = async_exec(sys.argv[1], test_callback)
loop.run_until_complete(task)Add comment showing parallel waf invocation__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s" % (self.name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(command, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
(command),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: '%s'"%line)
loop = asyncio.get_event_loop()
tasks = []
for command in sys.argv[1:]:
task = async_exec(command, test_callback)
tasks.append(task)
loop.run_until_complete(asyncio.wait(tasks))
# Test with
# PATH=$PWD/../bde-tools/bin:$PATH python3 ~/PycharmProjects/python_experiments/checkout.py \
# "export WAFLOCK=.waf-lock-dbg_exc_mt BDE_WAF_UFID=dbg_exc_mt; waf configure build --target=bsl" \
# "export WAFLOCK=.waf-lock-opt_exc_mt BDE_WAF_UFID=opt_exc_mt; waf configure build --target=bsl"
|
<commit_before>__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s" % (self.name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(repo, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
("git clone %s"%repo),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: %s"%line)
loop = asyncio.get_event_loop()
task = async_exec(sys.argv[1], test_callback)
loop.run_until_complete(task)<commit_msg>Add comment showing parallel waf invocation<commit_after>__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s" % (self.name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(command, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
(command),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: '%s'"%line)
loop = asyncio.get_event_loop()
tasks = []
for command in sys.argv[1:]:
task = async_exec(command, test_callback)
tasks.append(task)
loop.run_until_complete(asyncio.wait(tasks))
# Test with
# PATH=$PWD/../bde-tools/bin:$PATH python3 ~/PycharmProjects/python_experiments/checkout.py \
# "export WAFLOCK=.waf-lock-dbg_exc_mt BDE_WAF_UFID=dbg_exc_mt; waf configure build --target=bsl" \
# "export WAFLOCK=.waf-lock-opt_exc_mt BDE_WAF_UFID=opt_exc_mt; waf configure build --target=bsl"
|
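The @asyncio.coroutine / yield from style in this record predates Python 3.5; the same fan-out of shell commands can be expressed with async/await and asyncio.run on modern interpreters. A rough equivalent (a sketch, not a drop-in replacement for the script above):

import asyncio
import sys

async def run(command, on_line):
    proc = await asyncio.create_subprocess_shell(
        command,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.STDOUT)
    while True:
        line = await proc.stdout.readline()
        if not line:  # EOF
            break
        on_line(line)
    return await proc.wait()

async def main(commands):
    # Run every command concurrently and collect the exit codes.
    codes = await asyncio.gather(
        *(run(cmd, lambda line: print("Received:", line)) for cmd in commands))
    print(codes)

asyncio.run(main(sys.argv[1:]))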
603ad671c1f6976f75065a4365589a75e1e384ee
|
service_and_process/serializers.py
|
service_and_process/serializers.py
|
from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
|
from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
fields = '__all__'
|
Add explicit fields in serializer
|
Add explicit fields in serializer
|
Python
|
apache-2.0
|
rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory
|
from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
Add explicit fields in serializer
|
from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
fields = '__all__'
|
<commit_before>from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
<commit_msg>Add explicit fields in serializer<commit_after>
|
from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
fields = '__all__'
|
from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
Add explicit fields in serializerfrom .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
fields = '__all__'
|
<commit_before>from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
<commit_msg>Add explicit fields in serializer<commit_after>from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
fields = '__all__'
|
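For context, ModelSerializer in django-rest-framework 3.3+ refuses to guess and raises an assertion error unless Meta declares either fields or exclude, which is what the one-line change above satisfies. The stricter alternative to '__all__' is an explicit allow-list (the field names below are hypothetical, since the MasterWorkable model definition isn't shown):

from rest_framework import serializers
from .models import MasterWorkable

class MasterWorkableSerializer(serializers.ModelSerializer):
    class Meta:
        model = MasterWorkable
        # Only the listed fields are serialized, so new model fields
        # are never exposed by accident.
        fields = ('id', 'name')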
910c21778751e2814e649adf6f4db99378891ab1
|
_lib/wordpress_post_processor.py
|
_lib/wordpress_post_processor.py
|
import sys
import json
import requests
from string import Template
import dateutil.parser
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
resp = requests.get(url, params={'json':1,'page':current_page})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
del post['comments']
post['_id'] = post['slug']
# remove fields we're not interested in
post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
author_template = Template("$first_name $last_name")
post['author'] = [author['title'] for author in post['taxonomy_author']]
dt = dateutil.parser.parse(post['date'])
dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
post['date'] = dt_string
return post
|
import sys
import json
import requests
from string import Template
import dateutil.parser
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
resp = requests.get(url, params={'json':1,'page':current_page})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
del post['comments']
post['_id'] = post['slug']
# remove fields we're not interested in
post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
author_template = Template("$first_name $last_name")
post['author'] = [author['title'] for author in post['taxonomy_fj_author']]
dt = dateutil.parser.parse(post['date'])
dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
post['date'] = dt_string
return post
|
Update to new multiauthor taxonomy name
|
Update to new multiauthor taxonomy name
|
Python
|
cc0-1.0
|
kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh
|
import sys
import json
import requests
from string import Template
import dateutil.parser
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
resp = requests.get(url, params={'json':1,'page':current_page})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
del post['comments']
post['_id'] = post['slug']
# remove fields we're not interested in
post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
author_template = Template("$first_name $last_name")
post['author'] = [author['title'] for author in post['taxonomy_author']]
dt = dateutil.parser.parse(post['date'])
dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
post['date'] = dt_string
return post
Update to new multiauthor taxonomy name
|
import sys
import json
import requests
from string import Template
import dateutil.parser
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
resp = requests.get(url, params={'json':1,'page':current_page})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
del post['comments']
post['_id'] = post['slug']
# remove fields we're not interested in
post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
author_template = Template("$first_name $last_name")
post['author'] = [author['title'] for author in post['taxonomy_fj_author']]
dt = dateutil.parser.parse(post['date'])
dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
post['date'] = dt_string
return post
|
<commit_before>import sys
import json
import requests
from string import Template
import dateutil.parser
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
resp = requests.get(url, params={'json':1,'page':current_page})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
del post['comments']
post['_id'] = post['slug']
# remove fields we're not interested in
post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
author_template = Template("$first_name $last_name")
post['author'] = [author['title'] for author in post['taxonomy_author']]
dt = dateutil.parser.parse(post['date'])
dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
post['date'] = dt_string
return post
<commit_msg>Update to new multiauthor taxonomy name<commit_after>
|
import sys
import json
import requests
from string import Template
import dateutil.parser
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
resp = requests.get(url, params={'json':1,'page':current_page})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
del post['comments']
post['_id'] = post['slug']
# remove fields we're not interested in
post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
author_template = Template("$first_name $last_name")
post['author'] = [author['title'] for author in post['taxonomy_fj_author']]
dt = dateutil.parser.parse(post['date'])
dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
post['date'] = dt_string
return post
|
import sys
import json
import requests
from string import Template
import dateutil.parser
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
resp = requests.get(url, params={'json':1,'page':current_page})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
del post['comments']
post['_id'] = post['slug']
# remove fields we're not interested in
post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
author_template = Template("$first_name $last_name")
post['author'] = [author['title'] for author in post['taxonomy_author']]
dt = dateutil.parser.parse(post['date'])
dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
post['date'] = dt_string
return post
Update to new multiauthor taxonomy nameimport sys
import json
import requests
from string import Template
import dateutil.parser
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
resp = requests.get(url, params={'json':1,'page':current_page})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
del post['comments']
post['_id'] = post['slug']
# remove fields we're not interested in
post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
author_template = Template("$first_name $last_name")
post['author'] = [author['title'] for author in post['taxonomy_fj_author']]
dt = dateutil.parser.parse(post['date'])
dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
post['date'] = dt_string
return post
|
<commit_before>import sys
import json
import requests
from string import Template
import dateutil.parser
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
resp = requests.get(url, params={'json':1,'page':current_page})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
del post['comments']
post['_id'] = post['slug']
# remove fields we're not interested in
post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
author_template = Template("$first_name $last_name")
post['author'] = [author['title'] for author in post['taxonomy_author']]
dt = dateutil.parser.parse(post['date'])
dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
post['date'] = dt_string
return post
<commit_msg>Update to new multiauthor taxonomy name<commit_after>import sys
import json
import requests
from string import Template
import dateutil.parser
def posts_at_url(url):
current_page = 1
max_page = sys.maxint
while current_page <= max_page:
resp = requests.get(url, params={'json':1,'page':current_page})
results = json.loads(resp.content)
current_page += 1
max_page = results['pages']
for p in results['posts']:
yield p
def documents(name, url, **kwargs):
for post in posts_at_url(url):
yield process_post(post)
def process_post(post):
del post['comments']
post['_id'] = post['slug']
# remove fields we're not interested in
post['category'] = [cat['title'] for cat in post['taxonomy_fj_category']]
post['tags'] = [tag['title'] for tag in post['taxonomy_fj_tag']]
author_template = Template("$first_name $last_name")
post['author'] = [author['title'] for author in post['taxonomy_fj_author']]
dt = dateutil.parser.parse(post['date'])
dt_string = dt.strftime('%Y-%m-%dT%H:%M:%SZ')
post['date'] = dt_string
return post
|
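For readers porting the importer above, a minimal Python 3 sketch of the same paginate-and-transform pattern follows. It assumes only the endpoint shape visible in the record (a JSON body with 'pages' and 'posts' keys); sys.maxint no longer exists in Python 3, so the page bound is seeded from the first response instead.

import requests

def posts_at_url(url):
    """Yield every post from a paginated WordPress-style JSON endpoint."""
    current_page = 1
    max_page = 1  # updated from the first response; sys.maxint was removed in Python 3
    while current_page <= max_page:
        resp = requests.get(url, params={'json': 1, 'page': current_page})
        results = resp.json()
        max_page = results['pages']
        current_page += 1
        for post in results['posts']:
            yield post

def process_post(post):
    """Keep only the fields the importer needs, including the renamed author taxonomy."""
    post['_id'] = post['slug']
    post['author'] = [author['title'] for author in post.get('taxonomy_fj_author', [])]
    return post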
f243d309e5168b5855045227c9c0a6b082bedc69
|
luigi/tasks/gtrnadb/__init__.py
|
luigi/tasks/gtrnadb/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import iglob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file to
create the CSV files, but does not run the secondary structure importing.
That has to be triggered manually after this is complete.
"""
def requires(self):
config = gtrnadb()
for filename in iglob(config.pattern):
yield GtRNAdbJsonToCsv(input_file=filename)
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import glob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file
to create the CSV files, but does not run the secondary structure
importing. That has to be triggered manually after this is complete.
"""
def requires(self):
config = gtrnadb()
files = glob(config.pattern)
if not files:
raise ValueError("No GtRNAdb data files file")
for filename in files:
yield GtRNAdbJsonToCsv(input_file=filename)
|
Check that there are data files to import
|
Check that there are data files to import
It is possible for the pattern to match nothing leading to no files
being imported. This is an error case so we raise an exception if it
happens.
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import iglob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file to
create the CSV files, but does not run the secondary structure importing.
That has to be triggered manually after this is complete.
"""
def requires(self):
config = gtrnadb()
for filename in iglob(config.pattern):
yield GtRNAdbJsonToCsv(input_file=filename)
Check that there are data files to import
It is possible for the pattern to match nothing leading to no files
being imported. This is an error case so we raise an exception if it
happens.
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import glob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file
to create the CSV files, but does not run the secondary structure
importing. That has to be triggered manually after this is complete.
"""
def requires(self):
config = gtrnadb()
files = glob(config.pattern)
if not files:
raise ValueError("No GtRNAdb data files file")
for filename in files:
yield GtRNAdbJsonToCsv(input_file=filename)
|
<commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import iglob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file to
create the CSV files, but does not run the secondary structure importing.
That has to be triggered manually after this is complete.
"""
def requires(self):
config = gtrnadb()
for filename in iglob(config.pattern):
yield GtRNAdbJsonToCsv(input_file=filename)
<commit_msg>Check that there are data files to import
It is possible for the pattern to match nothing leading to no files
being imported. This is an error case so we raise an exception if it
happens.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import glob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file
to create the CSV files, but does not run the secondary structure
importing. That has to be triggered manually after this is complete.
"""
def requires(self):
config = gtrnadb()
files = glob(config.pattern)
if not files:
raise ValueError("No GtRNAdb data files file")
for filename in files:
yield GtRNAdbJsonToCsv(input_file=filename)
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import iglob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file to
create the CSV files, but does not run the secondary structure importing.
That has to be triggered manually after this is complete.
"""
def requires(self):
config = gtrnadb()
for filename in iglob(config.pattern):
yield GtRNAdbJsonToCsv(input_file=filename)
Check that there are data files to import
It is possible for the pattern to match nothing leading to no files
being imported. This is an error case so we raise an exception if it
happens.# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import glob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file
to create the CSV files, but does not run the secondary structure
importing. That has to be triggered manually after this is complete.
"""
def requires(self):
config = gtrnadb()
files = glob(config.pattern)
if not files:
raise ValueError("No GtRNAdb data files file")
for filename in files:
yield GtRNAdbJsonToCsv(input_file=filename)
|
<commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import iglob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file to
create the CSV files, but does not run the secondary structure importing.
That has to be triggered manually after this is complete.
"""
def requires(self):
config = gtrnadb()
for filename in iglob(config.pattern):
yield GtRNAdbJsonToCsv(input_file=filename)
<commit_msg>Check that there are data files to import
It is possible for the pattern to match nothing leading to no files
being imported. This is an error case so we raise an exception if it
happens.<commit_after># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import glob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file
to create the CSV files, but does not run the secondary structure
importing. That has to be triggered manually after this is complete.
"""
def requires(self):
config = gtrnadb()
files = glob(config.pattern)
if not files:
raise ValueError("No GtRNAdb data files file")
for filename in files:
yield GtRNAdbJsonToCsv(input_file=filename)
|
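The check added in this commit generalizes beyond luigi: a glob pattern that matches nothing silently yields zero tasks, so failing fast is the safer default. A minimal sketch of that pattern, assuming only the standard library:

from glob import glob

def required_files(pattern):
    """Return the files matching a glob pattern, failing fast when there are none.

    A pattern that matches nothing would otherwise produce zero tasks and the
    import would silently do no work.
    """
    files = glob(pattern)
    if not files:
        raise ValueError('No files match pattern: {!r}'.format(pattern))
    return files

A wrapper task's requires() can then iterate over required_files(config.pattern) exactly as the record above does.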
9c7090215ecda3fd4d173c8c5f2d3e1462fbbeee
|
takePicture.py
|
takePicture.py
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
#os.unlink('greg.jpg')
img = cam.capture('gregTest.jpg')
time.sleep(.25)
#oc.rename('gregTemp.jpg', 'greg.jpg')
x +=1
exit()
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
# capture to a temp file, then rename it over the target so readers never see a partial image
img = cam.capture('gregTempTest.jpg')
os.rename('gregTempTest.jpg', 'gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
Add temp file sequence to take picture file
|
Add temp file sequence to take picture file
|
Python
|
mit
|
jwarshaw/RaspberryDrive
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
#os.unlink('greg.jpg')
img = cam.capture('gregTest.jpg')
time.sleep(.25)
#oc.rename('gregTemp.jpg', 'greg.jpg')
x +=1
exit()
Add temp file sequence to take picture file
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
# capture to a temp file, then rename it over the target so readers never see a partial image
img = cam.capture('gregTempTest.jpg')
os.rename('gregTempTest.jpg', 'gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
<commit_before>import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
#os.unlink('greg.jpg')
img = cam.capture('gregTest.jpg')
time.sleep(.25)
#oc.rename('gregTemp.jpg', 'greg.jpg')
x +=1
exit()
<commit_msg>Add temp file sequence to take picture file<commit_after>
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
# capture to a temp file, then rename it over the target so readers never see a partial image
img = cam.capture('gregTempTest.jpg')
os.rename('gregTempTest.jpg', 'gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
#os.unlink('greg.jpg')
img = cam.capture('gregTest.jpg')
time.sleep(.25)
#oc.rename('gregTemp.jpg', 'greg.jpg')
x +=1
exit()
Add temp file sequence to take picture fileimport picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
# capture to a temp file, then rename it over the target so readers never see a partial image
img = cam.capture('gregTempTest.jpg')
os.rename('gregTempTest.jpg', 'gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
<commit_before>import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
#os.unlink('greg.jpg')
img = cam.capture('gregTest.jpg')
time.sleep(.25)
#oc.rename('gregTemp.jpg', 'greg.jpg')
x +=1
exit()
<commit_msg>Add temp file sequence to take picture file<commit_after>import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
# capture to a temp file, then rename it over the target so readers never see a partial image
img = cam.capture('gregTempTest.jpg')
os.rename('gregTempTest.jpg', 'gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
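The intent of the commit above is the classic write-then-rename sequence, so whatever reads gregTest.jpg never observes a half-written image. A minimal sketch of that sequence, assuming Python 3 and any callable that writes a file at the path it is given (picamera's capture() fits):

import os

def save_atomically(write, target):
    """Write to a temporary path, then move it over the target in one step.

    os.replace() is atomic on POSIX and also overwrites an existing target on
    Windows, so no separate unlink of the old file is needed.
    """
    tmp = target + '.tmp'
    write(tmp)
    os.replace(tmp, target)

# usage with picamera would be: save_atomically(cam.capture, 'gregTest.jpg')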
9f05a8917ee6fd01a334ef2e1e57062be8ef13af
|
byceps/config_defaults.py
|
byceps/config_defaults.py
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
Enable DBMS pool pre-pinging to avoid connection errors
|
Enable DBMS pool pre-pinging to avoid connection errors
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
Enable DBMS pool pre-pinging to avoid connection errors
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
<commit_before>"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
<commit_msg>Enable DBMS pool pre-pinging to avoid connection errors<commit_after>
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
Enable DBMS pool pre-pinging to avoid connection errors"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
<commit_before>"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
<commit_msg>Enable DBMS pool pre-pinging to avoid connection errors<commit_after>"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
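For context on the setting added above: pool_pre_ping is a create_engine() option (SQLAlchemy 1.2+) that tests each pooled connection with a lightweight round trip on checkout and transparently replaces any the server has dropped. Flask-SQLAlchemy forwards SQLALCHEMY_ENGINE_OPTIONS to create_engine(), so the plain-SQLAlchemy equivalent is a one-liner; the sqlite URL below is only so the sketch runs anywhere.

from sqlalchemy import create_engine

# Stale connections are detected on checkout and recycled instead of surfacing
# as "server closed the connection" errors after a database restart.
engine = create_engine('sqlite:///example.db', pool_pre_ping=True)

with engine.connect() as conn:
    pass  # the connection has been pre-pinged before being handed out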
968b862f6e437b627776b9b8ccf6204434493101
|
tests/test_rover_instance.py
|
tests/test_rover_instance.py
|
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
|
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
def test_rover_position(self):
assert self.rover.position == (self.rover.x, self.rover.y, self.rover.direction)
|
Add failing rover position reporting test
|
Add failing rover position reporting test
|
Python
|
mit
|
authentik8/rover
|
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
Add failing rover position reporting test
|
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
def test_rover_position(self):
assert self.rover.position == (self.rover.x, self.rover.y, self.rover.direction)
|
<commit_before>
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
<commit_msg>Add failing rover position reporting test<commit_after>
|
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
def test_rover_position(self):
assert self.rover.position == (self.rover.x, self.rover.y, self.rover.direction)
|
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
Add failing rover position reporting test
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
def test_rover_position(self):
assert self.rover.position == (self.rover.x, self.rover.y, self.rover.direction)
|
<commit_before>
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
<commit_msg>Add failing rover position reporting test<commit_after>
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
def test_rover_position(self):
assert self.rover.position == (self.rover.x, self.rover.y, self.rover.direction)
|
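The repository's Rover class is not shown in this record, but the failing test pins down the interface it expects. A minimal sketch that would satisfy both tests; the x, y and direction attributes and their defaults are assumptions inferred from the assertions, not the project's actual implementation:

class Rover:
    compass = ['N', 'E', 'S', 'W']

    def __init__(self, x=0, y=0, direction='N'):
        self.x = x
        self.y = y
        self.direction = direction

    @property
    def position(self):
        """Report the rover's state as an (x, y, direction) tuple."""
        return (self.x, self.y, self.direction)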
4d6c580f5dcf246bd75b499ee7a630eaf024b4d0
|
harvester/sns_message.py
|
harvester/sns_message.py
|
import os
import boto3
import botocore.exceptions
import logging
import requests
logger = logging.getLogger(__name__)
def format_results_subject(cid, registry_action):
'''Format the "subject" part of the harvesting message for
results from the various processes.
Results: [Action from Registry] on [Worker IP] for Collection ID [###]
'''
if '{env}' in registry_action:
registry_action = registry_action.format(
env=os.environ.get('DATA_BRANCH'))
resp = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4')
worker_ip = resp.text
worker_id = worker_ip.replace('.', '-')
return 'Results: {} on {} for CID: {}'.format(
registry_action,
worker_id,
cid)
def publish_to_harvesting(subject, message):
'''Publish a SNS message to the harvesting topic channel'''
client = boto3.client('sns')
# NOTE: this appears to raise exceptions if problem
try:
client.publish(
TopicArn=os.environ['ARN_TOPIC_HARVESTING_REPORT'],
Message=message,
Subject=subject if len(subject) <= 100 else subject[:100]
)
except botocore.exceptions.BotoCoreError, e:
logger.error('Exception in Boto SNS: {}'.format(e))
|
import os
import boto3
import botocore.exceptions
import logging
import requests
logger = logging.getLogger(__name__)
def format_results_subject(cid, registry_action):
'''Format the "subject" part of the harvesting message for
results from the various processes.
Results: [Action from Registry] on [Worker IP] for Collection ID [###]
'''
if '{env}' in registry_action:
registry_action = registry_action.format(
env=os.environ.get('DATA_BRANCH'))
resp = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4')
worker_ip = resp.text
worker_id = worker_ip.replace('.', '-')
return 'Results: {} on :worker: {} for CID: {}'.format(
registry_action,
worker_id,
cid)
def publish_to_harvesting(subject, message):
'''Publish a SNS message to the harvesting topic channel'''
client = boto3.client('sns')
# NOTE: this appears to raise exceptions if problem
try:
client.publish(
TopicArn=os.environ['ARN_TOPIC_HARVESTING_REPORT'],
Message=message,
Subject=subject if len(subject) <= 100 else subject[:100]
)
except botocore.exceptions.BotoCoreError, e:
logger.error('Exception in Boto SNS: {}'.format(e))
|
Add worker emoji to subject
|
Add worker emoji to subject
|
Python
|
bsd-3-clause
|
barbarahui/harvester,mredar/harvester,ucldc/harvester,barbarahui/harvester,mredar/harvester,ucldc/harvester
|
import os
import boto3
import botocore.exceptions
import logging
import requests
logger = logging.getLogger(__name__)
def format_results_subject(cid, registry_action):
'''Format the "subject" part of the harvesting message for
results from the various processes.
Results: [Action from Registry] on [Worker IP] for Collection ID [###]
'''
if '{env}' in registry_action:
registry_action = registry_action.format(
env=os.environ.get('DATA_BRANCH'))
resp = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4')
worker_ip = resp.text
worker_id = worker_ip.replace('.', '-')
return 'Results: {} on {} for CID: {}'.format(
registry_action,
worker_id,
cid)
def publish_to_harvesting(subject, message):
'''Publish a SNS message to the harvesting topic channel'''
client = boto3.client('sns')
# NOTE: this appears to raise exceptions if problem
try:
client.publish(
TopicArn=os.environ['ARN_TOPIC_HARVESTING_REPORT'],
Message=message,
Subject=subject if len(subject) <= 100 else subject[:100]
)
except botocore.exceptions.BotoCoreError, e:
logger.error('Exception in Boto SNS: {}'.format(e))
Add worker emoji to subject
|
import os
import boto3
import botocore.exceptions
import logging
import requests
logger = logging.getLogger(__name__)
def format_results_subject(cid, registry_action):
'''Format the "subject" part of the harvesting message for
results from the various processes.
Results: [Action from Registry] on [Worker IP] for Collection ID [###]
'''
if '{env}' in registry_action:
registry_action = registry_action.format(
env=os.environ.get('DATA_BRANCH'))
resp = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4')
worker_ip = resp.text
worker_id = worker_ip.replace('.', '-')
return 'Results: {} on :worker: {} for CID: {}'.format(
registry_action,
worker_id,
cid)
def publish_to_harvesting(subject, message):
'''Publish a SNS message to the harvesting topic channel'''
client = boto3.client('sns')
# NOTE: this appears to raise exceptions if problem
try:
client.publish(
TopicArn=os.environ['ARN_TOPIC_HARVESTING_REPORT'],
Message=message,
Subject=subject if len(subject) <= 100 else subject[:100]
)
except botocore.exceptions.BotoCoreError, e:
logger.error('Exception in Boto SNS: {}'.format(e))
|
<commit_before>import os
import boto3
import botocore.exceptions
import logging
import requests
logger = logging.getLogger(__name__)
def format_results_subject(cid, registry_action):
'''Format the "subject" part of the harvesting message for
results from the various processes.
Results: [Action from Registry] on [Worker IP] for Collection ID [###]
'''
if '{env}' in registry_action:
registry_action = registry_action.format(
env=os.environ.get('DATA_BRANCH'))
resp = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4')
worker_ip = resp.text
worker_id = worker_ip.replace('.', '-')
return 'Results: {} on {} for CID: {}'.format(
registry_action,
worker_id,
cid)
def publish_to_harvesting(subject, message):
'''Publish a SNS message to the harvesting topic channel'''
client = boto3.client('sns')
# NOTE: this appears to raise exceptions if problem
try:
client.publish(
TopicArn=os.environ['ARN_TOPIC_HARVESTING_REPORT'],
Message=message,
Subject=subject if len(subject) <= 100 else subject[:100]
)
except botocore.exceptions.BotoCoreError, e:
logger.error('Exception in Boto SNS: {}'.format(e))
<commit_msg>Add worker emoji to subject<commit_after>
|
import os
import boto3
import botocore.exceptions
import logging
import requests
logger = logging.getLogger(__name__)
def format_results_subject(cid, registry_action):
'''Format the "subject" part of the harvesting message for
results from the various processes.
Results: [Action from Registry] on [Worker IP] for Collection ID [###]
'''
if '{env}' in registry_action:
registry_action = registry_action.format(
env=os.environ.get('DATA_BRANCH'))
resp = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4')
worker_ip = resp.text
worker_id = worker_ip.replace('.', '-')
return 'Results: {} on :worker: {} for CID: {}'.format(
registry_action,
worker_id,
cid)
def publish_to_harvesting(subject, message):
'''Publish a SNS message to the harvesting topic channel'''
client = boto3.client('sns')
# NOTE: this appears to raise exceptions if problem
try:
client.publish(
TopicArn=os.environ['ARN_TOPIC_HARVESTING_REPORT'],
Message=message,
Subject=subject if len(subject) <= 100 else subject[:100]
)
except botocore.exceptions.BotoCoreError, e:
logger.error('Exception in Boto SNS: {}'.format(e))
|
import os
import boto3
import botocore.exceptions
import logging
import requests
logger = logging.getLogger(__name__)
def format_results_subject(cid, registry_action):
'''Format the "subject" part of the harvesting message for
results from the various processes.
Results: [Action from Registry] on [Worker IP] for Collection ID [###]
'''
if '{env}' in registry_action:
registry_action = registry_action.format(
env=os.environ.get('DATA_BRANCH'))
resp = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4')
worker_ip = resp.text
worker_id = worker_ip.replace('.', '-')
return 'Results: {} on {} for CID: {}'.format(
registry_action,
worker_id,
cid)
def publish_to_harvesting(subject, message):
'''Publish a SNS message to the harvesting topic channel'''
client = boto3.client('sns')
# NOTE: this appears to raise exceptions if problem
try:
client.publish(
TopicArn=os.environ['ARN_TOPIC_HARVESTING_REPORT'],
Message=message,
Subject=subject if len(subject) <= 100 else subject[:100]
)
except botocore.exceptions.BotoCoreError, e:
logger.error('Exception in Boto SNS: {}'.format(e))
Add worker emoji to subjectimport os
import boto3
import botocore.exceptions
import logging
import requests
logger = logging.getLogger(__name__)
def format_results_subject(cid, registry_action):
'''Format the "subject" part of the harvesting message for
results from the various processes.
Results: [Action from Registry] on [Worker IP] for Collection ID [###]
'''
if '{env}' in registry_action:
registry_action = registry_action.format(
env=os.environ.get('DATA_BRANCH'))
resp = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4')
worker_ip = resp.text
worker_id = worker_ip.replace('.', '-')
return 'Results: {} on :worker: {} for CID: {}'.format(
registry_action,
worker_id,
cid)
def publish_to_harvesting(subject, message):
'''Publish a SNS message to the harvesting topic channel'''
client = boto3.client('sns')
# NOTE: this appears to raise exceptions if problem
try:
client.publish(
TopicArn=os.environ['ARN_TOPIC_HARVESTING_REPORT'],
Message=message,
Subject=subject if len(subject) <= 100 else subject[:100]
)
except botocore.exceptions.BotoCoreError, e:
logger.error('Exception in Boto SNS: {}'.format(e))
|
<commit_before>import os
import boto3
import botocore.exceptions
import logging
import requests
logger = logging.getLogger(__name__)
def format_results_subject(cid, registry_action):
'''Format the "subject" part of the harvesting message for
results from the various processes.
Results: [Action from Registry] on [Worker IP] for Collection ID [###]
'''
if '{env}' in registry_action:
registry_action = registry_action.format(
env=os.environ.get('DATA_BRANCH'))
resp = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4')
worker_ip = resp.text
worker_id = worker_ip.replace('.', '-')
return 'Results: {} on {} for CID: {}'.format(
registry_action,
worker_id,
cid)
def publish_to_harvesting(subject, message):
'''Publish a SNS message to the harvesting topic channel'''
client = boto3.client('sns')
# NOTE: this appears to raise exceptions if problem
try:
client.publish(
TopicArn=os.environ['ARN_TOPIC_HARVESTING_REPORT'],
Message=message,
Subject=subject if len(subject) <= 100 else subject[:100]
)
except botocore.exceptions.BotoCoreError, e:
logger.error('Exception in Boto SNS: {}'.format(e))
<commit_msg>Add worker emoji to subject<commit_after>import os
import boto3
import botocore.exceptions
import logging
import requests
logger = logging.getLogger(__name__)
def format_results_subject(cid, registry_action):
'''Format the "subject" part of the harvesting message for
results from the various processes.
Results: [Action from Registry] on [Worker IP] for Collection ID [###]
'''
if '{env}' in registry_action:
registry_action = registry_action.format(
env=os.environ.get('DATA_BRANCH'))
resp = requests.get('http://169.254.169.254/latest/meta-data/local-ipv4')
worker_ip = resp.text
worker_id = worker_ip.replace('.', '-')
return 'Results: {} on :worker: {} for CID: {}'.format(
registry_action,
worker_id,
cid)
def publish_to_harvesting(subject, message):
'''Publish a SNS message to the harvesting topic channel'''
client = boto3.client('sns')
# NOTE: this appears to raise exceptions if problem
try:
client.publish(
TopicArn=os.environ['ARN_TOPIC_HARVESTING_REPORT'],
Message=message,
Subject=subject if len(subject) <= 100 else subject[:100]
)
except botocore.exceptions.BotoCoreError, e:
logger.error('Exception in Boto SNS: {}'.format(e))
|
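The record above is Python 2 (note the `except BotoCoreError, e` spelling). A sketch of the same publish-with-truncation step in Python 3 syntax, assuming boto3 credentials and the topic ARN are configured; slicing to 100 characters covers both branches of the original conditional, since SNS rejects longer subjects:

import logging

import boto3
import botocore.exceptions

logger = logging.getLogger(__name__)

def publish_to_harvesting(topic_arn, subject, message):
    """Publish a report to the harvesting SNS topic."""
    client = boto3.client('sns')
    try:
        client.publish(
            TopicArn=topic_arn,
            Subject=subject[:100],  # SNS caps subjects at 100 characters
            Message=message,
        )
    except botocore.exceptions.BotoCoreError as exc:
        logger.error('Exception in Boto SNS: %s', exc)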
5c442db8a6352c21325f372486409d44ad3f5b76
|
ServerBackup.py
|
ServerBackup.py
|
#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, os
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
|
#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, PushBullet, os
from time import localtime, strftime
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
date = strftime("%m_%d_%Y_%H_%M_%S", localtime())
sbFolder = sbLocation + "backup_" + date + "/"
os.makedirs(sbFolder)
if not os.path.exists(sbFolder):
message = "Folder, \"%s\", couldn't be made" % sbFolder
sbLogger.critical(message)
PushBullet.sendPushNote({'id':PushBullet.getPushDevicesIds(), 'title':"Server Backup Error", 'message':message})
exit(message)
|
Backup directory is made, and a notification is sent and logged if the directory doesn't exist
|
Backup directory is made, and a notification is sent and logged if the directory doesn't exist
|
Python
|
mit
|
dwieeb/usr-local-bin
|
#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, os
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
Backup directory is made, and a notification is sent and logged if the directory doesn't exist
|
#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, PushBullet, os
from time import localtime, strftime
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
date = strftime("%m_%d_%Y_%H_%M_%S", localtime())
sbFolder = sbLocation + "backup_" + date + "/"
os.makedirs(sbFolder)
if not os.path.exists(sbFolder):
message = "Folder, \"%s\", couldn't be made" % sbFolder
sbLogger.critical(message)
PushBullet.sendPushNote({'id':PushBullet.getPushDevicesIds(), 'title':"Server Backup Error", 'message':message})
exit(message)
|
<commit_before>#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, os
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
<commit_msg>Backup directory is made, and a notification is sent and logged if the directory doesn't exist<commit_after>
|
#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, PushBullet, os
from time import localtime, strftime
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
date = strftime("%m_%d_%Y_%H_%M_%S", localtime())
sbFolder = sbLocation + "backup_" + date + "/"
os.makedirs(sbFolder)
if not os.path.exists(sbFolder):
message = "Folder, \"%s\", couldn't be made" % sbFolder
sbLogger.critical(message)
PushBullet.sendPushNote({'id':PushBullet.getPushDevicesIds(), 'title':"Server Backup Error", 'message':message})
exit(message)
|
#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, os
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
Backup directory is made, and a notification is sent and logged if the directory doesn't exist#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, PushBullet, os
from time import localtime, strftime
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
date = strftime("%m_%d_%Y_%H_%M_%S", localtime())
sbFolder = sbLocation + "backup_" + date + "/"
os.makedirs(sbFolder)
if not os.path.exists(sbFolder):
message = "Folder, \"%s\", couldn't be made" % sbFolder
sbLogger.critical(message)
PushBullet.sendPushNote({'id':PushBullet.getPushDevicesIds(), 'title':"Server Backup Error", 'message':message})
exit(message)
|
<commit_before>#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, os
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
<commit_msg>Backup directory is made, and a notification is sent and logged if the directory doesn't exist<commit_after>#!/usr/bin/python2
import LogUncaught, ConfigParser, logging, PushBullet, os
from time import localtime, strftime
sbConfig = ConfigParser.RawConfigParser()
sbConfig.read('scripts.cfg')
# Logger File Handler
sbLFH = logging.FileHandler(sbConfig.get('ServerBackup', 'log_location'))
sbLFH.setLevel(logging.DEBUG)
# Logger Formatter
sbLFORMAT = logging.Formatter('[%(asctime)s | %(levelname)s] - %(message)s')
sbLFH.setFormatter(sbLFORMAT)
# Logger
sbLogger = logging.getLogger("serverbackup_logger")
sbLogger.setLevel(logging.DEBUG)
sbLogger.addHandler(sbLFH)
sbLogger.info("Script has begun")
sbLocation = sbConfig.get('ServerBackup', 'backup_location')
databasePass = sbConfig.get('Database', 'password')
lbLocation = sbConfig.get('LogBackup', 'backup_location')
date = strftime("%m_%d_%Y_%H_%M_%S", localtime())
sbFolder = sbLocation + "backup_" + date + "/"
os.makedirs(sbFolder)
if not os.path.exists(sbFolder):
message = "Folder, \"%s\", couldn't be made" % sbFolder
sbLogger.critical(message)
PushBullet.sendPushNote({'id':PushBullet.getPushDevicesIds(), 'title':"Server Backup Error", 'message':message})
exit(message)
|
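One detail worth noting in the commit above: os.makedirs() raises OSError when it cannot create the directory, so the follow-up exists() check can only fire in unusual races. A Python 3 sketch of the same step, assuming only the standard library, that leans on the exception instead:

import os
from time import localtime, strftime

def make_backup_folder(root):
    """Create and return a timestamped backup directory under `root`."""
    date = strftime('%m_%d_%Y_%H_%M_%S', localtime())
    folder = os.path.join(root, 'backup_' + date)
    os.makedirs(folder)  # raises OSError on failure; no exists() check needed
    return folder

Callers that need the PushBullet alert can wrap the call in try/except OSError and send the notification from the handler.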
f4444f390ed2d16fab40e098d743870420da3bad
|
blockbuster/bb_logging.py
|
blockbuster/bb_logging.py
|
import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger("blockbuster")
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger("blockbuster")
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.INFO)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
Change log level for file handler
|
Change log level for file handler
|
Python
|
mit
|
mattstibbs/blockbuster-server,mattstibbs/blockbuster-server
|
import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger("blockbuster")
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)Change log level for file handler
|
import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger("blockbuster")
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.INFO)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
<commit_before>import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger("blockbuster")
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)<commit_msg>Change log level for file handler<commit_after>
|
import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger("blockbuster")
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.INFO)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger("blockbuster")
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)Change log level for file handlerimport config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger("blockbuster")
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.INFO)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
<commit_before>import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger("blockbuster")
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)<commit_msg>Change log level for file handler<commit_after>import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger("blockbuster")
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.INFO)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s %(levelname)s [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
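The diff above narrows only the file handler to INFO while the logger itself stays at DEBUG; handler-level thresholds filter records per sink after the logger has accepted them. A minimal, self-contained sketch of that behaviour (the "demo.log" filename is illustrative):
import logging

logger = logging.getLogger("demo")
logger.setLevel(logging.DEBUG)        # the logger itself passes everything on

console = logging.StreamHandler()
console.setLevel(logging.DEBUG)       # console prints DEBUG and above

to_file = logging.FileHandler("demo.log")
to_file.setLevel(logging.INFO)        # the file drops DEBUG records

logger.addHandler(console)
logger.addHandler(to_file)

logger.debug("console only")          # filtered out of demo.log
logger.info("console and demo.log")   # reaches both handlers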
21835415f0224e08c7328151d4319ec73d67cbe1
|
station.py
|
station.py
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new station
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new station
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = int(eval(input("Enter the max capacity of the station: ")))
#testfuntion()
self.escalators = int(eval(input("Enter the number of escalators in the station: ")))
#testfuntion()
self.train_wait = int(eval(input("Enter the wait time between trains in seconds: ")))
#testfuntion()
self.travelors_arriving = int(eval(input("How many people just exited the train? ")))
#testfuntion()
self.travelors_departing = int(eval(input("How many people are waiting for the train? ")))
#testfuntion()
|
Add int to input statements
|
Add int to input statements
Ref #23 #10
|
Python
|
mit
|
ForestPride/rail-problem
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new station
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
Add int to input statements
Ref #23 #10
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new station
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = int(eval(input("Enter the max capacity of the station: ")))
#testfuntion()
self.escalators = int(eval(input("Enter the number of escalators in the station: ")))
#testfuntion()
self.train_wait = int(eval(input("Enter the wait time between trains in seconds: ")))
#testfuntion()
self.travelors_arriving = int(eval(input("How many people just exited the train? ")))
#testfuntion()
self.travelors_departing = int(eval(input("How many people are waiting for the train? ")))
#testfuntion()
|
<commit_before>"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new station
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
<commit_msg>Add int to input statements
Ref #23 #10<commit_after>
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new station
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = int(eval(input("Enter the max capacity of the station: ")))
#testfuntion()
self.escalators = int(eval(input("Enter the number of escalators in the station: ")))
#testfuntion()
self.train_wait = int(eval(input("Enter the wait time between trains in seconds: ")))
#testfuntion()
self.travelors_arriving = int(eval(input("How many people just exited the train? ")))
#testfuntion()
self.travelors_departing = int(eval(input("How many people are waiting for the train? ")))
#testfuntion()
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new station
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
Add int to input statements
Ref #23 #10"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new station
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = int(eval(input("Enter the max capacity of the station: ")))
#testfuntion()
self.escalators = int(eval(input("Enter the number of escalators in the station: ")))
#testfuntion()
self.train_wait = int(eval(input("Enter the wait time between trains in seconds: ")))
#testfuntion()
self.travelors_arriving = int(eval(input("How many people just exited the train? ")))
#testfuntion()
self.travelors_departing = int(eval(input("How many people are waiting for the train? ")))
#testfuntion()
|
<commit_before>"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new station
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
<commit_msg>Add int to input statements
Ref #23 #10<commit_after>"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new station
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = int(eval(input("Enter the max capacity of the station: ")))
#testfuntion()
self.escalators = int(eval(input("Enter the number of escalators in the station: ")))
#testfuntion()
self.train_wait = int(eval(input("Enter the wait time between trains in seconds: ")))
#testfuntion()
self.travelors_arriving = int(eval(input("How many people just exited the train? ")))
#testfuntion()
self.travelors_departing = int(eval(input("How many people are waiting for the train? ")))
#testfuntion()
|
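One caveat on the change recorded above: wrapping int(...) around eval(input(...)) still routes user input through eval, which executes arbitrary expressions. Applying int to the raw string gives the same integer conversion without that risk. A sketch with a hypothetical ask_int helper and a retry loop; the prompt text echoes the record:
def ask_int(prompt):
    """Prompt until the user types a whole number; avoids eval entirely."""
    while True:
        raw = input(prompt)
        try:
            return int(raw)            # ValueError on non-numeric input
        except ValueError:
            print("Please enter a whole number.")

# e.g. capacity = ask_int("Enter the max capacity of the station: ")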
be6fb5a1ec264f2f3f5dd57c84b90a9c2c686fe3
|
mltsp/ext/celeryconfig.py
|
mltsp/ext/celeryconfig.py
|
#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
#CELERY_RETHINKDB_BACKEND_SETTINGS = {
# 'host': '127.0.0.1',
# 'port': 28015,
# 'db': 'celery_test',
# 'auth_key': '',
# 'timeout': 20,
# 'table': 'celery_taskmeta',
# 'options': {}
#}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
CELERY_RETHINKDB_BACKEND_SETTINGS = {
'host': '127.0.0.1',
'port': 28015,
'db': 'celery_test',
# 'auth_key': '',
'timeout': 20,
'table': 'celery_taskmeta',
'options': {}
}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
Change Celery RethinkDB backend config
|
Change Celery RethinkDB backend config
|
Python
|
bsd-3-clause
|
bnaul/mltsp,bnaul/mltsp,bnaul/mltsp,mltsp/mltsp,acrellin/mltsp,acrellin/mltsp,mltsp/mltsp,bnaul/mltsp,bnaul/mltsp,acrellin/mltsp,mltsp/mltsp,acrellin/mltsp,mltsp/mltsp,mltsp/mltsp,mltsp/mltsp,acrellin/mltsp,acrellin/mltsp,bnaul/mltsp
|
#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
#CELERY_RETHINKDB_BACKEND_SETTINGS = {
# 'host': '127.0.0.1',
# 'port': 28015,
# 'db': 'celery_test',
# 'auth_key': '',
# 'timeout': 20,
# 'table': 'celery_taskmeta',
# 'options': {}
#}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
Change Celery RethinkDB backend config
|
#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
CELERY_RETHINKDB_BACKEND_SETTINGS = {
'host': '127.0.0.1',
'port': 28015,
'db': 'celery_test',
# 'auth_key': '',
'timeout': 20,
'table': 'celery_taskmeta',
'options': {}
}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
<commit_before>#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
#CELERY_RETHINKDB_BACKEND_SETTINGS = {
# 'host': '127.0.0.1',
# 'port': 28015,
# 'db': 'celery_test',
# 'auth_key': '',
# 'timeout': 20,
# 'table': 'celery_taskmeta',
# 'options': {}
#}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
<commit_msg>Change Celery RethinkDB backend config<commit_after>
|
#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
CELERY_RETHINKDB_BACKEND_SETTINGS = {
'host': '127.0.0.1',
'port': 28015,
'db': 'celery_test',
# 'auth_key': '',
'timeout': 20,
'table': 'celery_taskmeta',
'options': {}
}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
#CELERY_RETHINKDB_BACKEND_SETTINGS = {
# 'host': '127.0.0.1',
# 'port': 28015,
# 'db': 'celery_test',
# 'auth_key': '',
# 'timeout': 20,
# 'table': 'celery_taskmeta',
# 'options': {}
#}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
Change Celery RethinkDB backend config#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
CELERY_RETHINKDB_BACKEND_SETTINGS = {
'host': '127.0.0.1',
'port': 28015,
'db': 'celery_test',
# 'auth_key': '',
'timeout': 20,
'table': 'celery_taskmeta',
'options': {}
}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
<commit_before>#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
#CELERY_RETHINKDB_BACKEND_SETTINGS = {
# 'host': '127.0.0.1',
# 'port': 28015,
# 'db': 'celery_test',
# 'auth_key': '',
# 'timeout': 20,
# 'table': 'celery_taskmeta',
# 'options': {}
#}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
<commit_msg>Change Celery RethinkDB backend config<commit_after>#CELERY_RESULT_BACKEND = 'mltsp.ext.rethinkdb_backend:RethinkBackend'
CELERY_RESULT_BACKEND = "amqp"
CELERY_RETHINKDB_BACKEND_SETTINGS = {
'host': '127.0.0.1',
'port': 28015,
'db': 'celery_test',
# 'auth_key': '',
'timeout': 20,
'table': 'celery_taskmeta',
'options': {}
}
CELERY_RESULT_SERIALIZER = 'json' # NOTE: MUST BE SET TO JSON
#CELERYD_LOG_FILE = "/tmp/celery.log"
CELERYD_LOG_LEVEL = "DEBUG"
INSTALLED_APPS = ["mltsp"]
CELERY_IMPORTS = ("mltsp", "celery_fit")
|
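For context on the settings dict re-enabled above: a result backend typically overlays user-supplied keys on its own defaults, so the still-commented auth_key simply falls back. An illustrative sketch of that merge, assuming nothing about the real backend's internals; the resolve_backend_settings name is hypothetical:
DEFAULTS = {
    'host': 'localhost', 'port': 28015, 'db': 'celery',
    'auth_key': '', 'timeout': 20, 'table': 'celery_taskmeta', 'options': {},
}

def resolve_backend_settings(configured):
    """Overlay configured backend settings on the defaults."""
    settings = dict(DEFAULTS)
    settings.update(configured or {})
    return settings

conf = resolve_backend_settings({'host': '127.0.0.1', 'db': 'celery_test'})
assert conf['auth_key'] == ''   # omitted keys keep their default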
42d64b71db7a21355132d1c1573e12798e377b4c
|
incomplete/pythagoras.py
|
incomplete/pythagoras.py
|
import sys
def gather_squares_triangles(p1,p2,depth):
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
pass
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
|
import sys
def gather_squares_triangles(p1,p2,depth):
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
if depth == 0:
return [],[]
pd = (p2[0] - p1[0]),(p1[1] - p2[1])
p3 = (p2[0] - pd[1]),(p2[1] - pd[0])
p4 = (p1[0] - pd[1]),(p1[1] - pd[0])
p5 = (p4[0] + (pd[0] - pd[1])/2),(p4[1] - (pd[0] + pd[1])/2)
squares_left,triangles_left = gather_squares_triangles(p4,p5,depth-1)
squares_right,triangles_right = gather_squares_triangles(p5,p3,depth-1)
squares = [[p1,p2,p3,p4]]+squares_left+squares_right
triangles = [[p3,p4,p5]]+triangles_left+triangles_right
return squares,triangles
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
|
Gather Squares & Triangles Implemented
|
PythagTree: Gather Squares & Triangles Implemented
|
Python
|
mit
|
kpatel20538/Rosetta-Code-Python-Tasks
|
import sys
def gather_squares_triangles(p1,p2,depth):
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
pass
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
PythagTree: Gather Squares & Triangles Implemented
|
import sys
def gather_squares_triangles(p1,p2,depth):
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
if depth == 0:
return [],[]
pd = (p2[0] - p1[0]),(p1[1] - p2[1])
p3 = (p2[0] - pd[1]),(p2[1] - pd[0])
p4 = (p1[0] - pd[1]),(p1[1] - pd[0])
p5 = (p4[0] + (pd[0] - pd[1])/2),(p4[1] - (pd[0] + pd[1])/2)
squares_left,triangles_left = gather_squares_triangles(p4,p5,depth-1)
squares_right,triangles_right = gather_squares_triangles(p5,p3,depth-1)
squares = [[p1,p2,p3,p4]]+squares_left+squares_right
triangles = [[p3,p4,p5]]+triangles_left+triangles_right
return squares,triangles
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
|
<commit_before>import sys
def gather_squares_triangles(p1,p2,depth):
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
pass
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
<commit_msg>PythagTree: Gather Squares & Triangles Implemented<commit_after>
|
import sys
def gather_squares_triangles(p1,p2,depth):
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
if depth == 0:
return [],[]
pd = (p2[0] - p1[0]),(p1[1] - p2[1])
p3 = (p2[0] - pd[1]),(p2[1] - pd[0])
p4 = (p1[0] - pd[1]),(p1[1] - pd[0])
p5 = (p4[0] + (pd[0] - pd[1])/2),(p4[1] - (pd[0] + pd[1])/2)
squares_left,triangles_left = gather_squares_triangles(p4,p5,depth-1)
squares_right,triangles_right = gather_squares_triangles(p5,p3,depth-1)
squares = [[p1,p2,p3,p4]]+squares_left+squares_right
triangles = [[p3,p4,p5]]+triangles_left+triangles_right
return squares,triangles
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
|
import sys
def gather_squares_triangles(p1,p2,depth):
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
pass
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
PythagTree: Gather Squares & Triangles Implementedimport sys
def gather_squares_triangles(p1,p2,depth):
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
if depth == 0:
return [],[]
pd = (p2[0] - p1[0]),(p1[1] - p2[1])
p3 = (p2[0] - pd[1]),(p2[1] - pd[0])
p4 = (p1[0] - pd[1]),(p1[1] - pd[0])
p5 = (p4[0] + (pd[0] - pd[1])/2),(p4[1] - (pd[0] + pd[1])/2)
squares_left,triangles_left = gather_squares_triangles(p4,p5,depth-1)
squares_right,triangles_right = gather_squares_triangles(p5,p3,depth-1)
squares = [[p1,p2,p3,p4]]+squares_left+squares_right
triangles = [[p3,p4,p5]]+triangles_left+triangles_right
return squares,triangles
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
|
<commit_before>import sys
def gather_squares_triangles(p1,p2,depth):
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
pass
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
<commit_msg>PythagTree: Gather Squares & Triangles Implemented<commit_after>import sys
def gather_squares_triangles(p1,p2,depth):
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
if depth == 0:
return [],[]
pd = (p2[0] - p1[0]),(p1[1] - p2[1])
p3 = (p2[0] - pd[1]),(p2[1] - pd[0])
p4 = (p1[0] - pd[1]),(p1[1] - pd[0])
p5 = (p4[0] + (pd[0] - pd[1])/2),(p4[1] - (pd[0] + pd[1])/2)
squares_left,triangles_left = gather_squares_triangles(p4,p5,depth-1)
squares_right,triangles_right = gather_squares_triangles(p5,p3,depth-1)
squares = [[p1,p2,p3,p4]]+squares_left+squares_right
triangles = [[p3,p4,p5]]+triangles_left+triangles_right
return squares,triangles
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
|
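A quick usage check for the recursion implemented above, assuming the record's gather_squares_triangles (with its colon) is in scope; the unit base points are illustrative. Each depth level contributes one square and one triangle and recurses twice, so a depth-d call returns 2**d - 1 of each:
squares, triangles = gather_squares_triangles((0.0, 1.0), (1.0, 1.0), 3)
assert len(squares) == len(triangles) == 2**3 - 1   # 7 of each at depth 3
assert all(len(s) == 4 for s in squares)            # four vertices per square
assert all(len(t) == 3 for t in triangles)          # three per right triangle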
3609df9044fd72008234bae9145487f315096fcd
|
hcalendar/__init__.py
|
hcalendar/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
"""
python-hcalendar is a basic hCalendar parser
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
python-hcalendar is a basic hCalendar parser
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
|
Fix hcalendar module __doc__ missing
|
Fix hcalendar module __doc__ missing
|
Python
|
mit
|
mback2k/python-hcalendar
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
"""
python-hcalendar is a basic hCalendar parser
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
Fix hcalendar module __doc__ missing
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
python-hcalendar is a basic hCalendar parser
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
"""
python-hcalendar is a basic hCalendar parser
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
<commit_msg>Fix hcalendar module __doc__ missing<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
python-hcalendar is a basic hCalendar parser
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
"""
python-hcalendar is a basic hCalendar parser
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
Fix hcalendar module __doc__ missing#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
python-hcalendar is a basic hCalendar parser
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
"""
python-hcalendar is a basic hCalendar parser
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
<commit_msg>Fix hcalendar module __doc__ missing<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
python-hcalendar is a basic hCalendar parser
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
|
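The fix above works because a string literal only becomes the module's __doc__ when it is the first statement; __future__ imports may follow a docstring, but nothing may precede one. A minimal reproduction under Python 3 semantics, using exec into plain dicts:
ns_good = {}
exec('"""I am the docstring."""\nfrom __future__ import division', ns_good)
assert ns_good['__doc__'] == "I am the docstring."

ns_bad = {}
exec('from __future__ import division\n"""I am not bound."""', ns_bad)
assert ns_bad.get('__doc__') is None   # the late string is a bare expression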
5688ca60985db606a3d42078a017bd851c1f01f6
|
build/fbcode_builder/specs/fbthrift.py
|
build/fbcode_builder/specs/fbthrift.py
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
builder.add_option('krb5/krb5:git_hash', 'krb5-1.16.1-final')
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separate spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.github_project_workdir('krb5/krb5', 'src'),
builder.autoconf_install('krb5/krb5'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separate spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
Cut fbcode_builder dep for thrift on krb5
|
Cut fbcode_builder dep for thrift on krb5
Summary: [Thrift] Cut `fbcode_builder` dep for `thrift` on `krb5`. In the past, Thrift depended on Kerberos and the `krb5` implementation for its transport-layer security. However, Thrift has since migrated fully to Transport Layer Security for its transport-layer security and no longer has any build-time dependency on `krb5`. Clean this up.
Reviewed By: stevegury, vitaut
Differential Revision: D14814205
fbshipit-source-id: dca469d22098e34573674194facaaac6c4c6aa32
|
Python
|
apache-2.0
|
facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
builder.add_option('krb5/krb5:git_hash', 'krb5-1.16.1-final')
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separate spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.github_project_workdir('krb5/krb5', 'src'),
builder.autoconf_install('krb5/krb5'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
Cut fbcode_builder dep for thrift on krb5
Summary: [Thrift] Cut `fbcode_builder` dep for `thrift` on `krb5`. In the past, Thrift depended on Kerberos and the `krb5` implementation for its transport-layer security. However, Thrift has since migrated fully to Transport Layer Security for its transport-layer security and no longer has any build-time dependency on `krb5`. Clean this up.
Reviewed By: stevegury, vitaut
Differential Revision: D14814205
fbshipit-source-id: dca469d22098e34573674194facaaac6c4c6aa32
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separate spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
<commit_before>#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
builder.add_option('krb5/krb5:git_hash', 'krb5-1.16.1-final')
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separate spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.github_project_workdir('krb5/krb5', 'src'),
builder.autoconf_install('krb5/krb5'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
<commit_msg>Cut fbcode_builder dep for thrift on krb5
Summary: [Thrift] Cut `fbcode_builder` dep for `thrift` on `krb5`. In the past, Thrift depended on Kerberos and the `krb5` implementation for its transport-layer security. However, Thrift has since migrated fully to Transport Layer Security for its transport-layer security and no longer has any build-time dependency on `krb5`. Clean this up.
Reviewed By: stevegury, vitaut
Differential Revision: D14814205
fbshipit-source-id: dca469d22098e34573674194facaaac6c4c6aa32<commit_after>
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separate spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
builder.add_option('krb5/krb5:git_hash', 'krb5-1.16.1-final')
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separate spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.github_project_workdir('krb5/krb5', 'src'),
builder.autoconf_install('krb5/krb5'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
Cut fbcode_builder dep for thrift on krb5
Summary: [Thrift] Cut `fbcode_builder` dep for `thrift` on `krb5`. In the past, Thrift depended on Kerberos and the `krb5` implementation for its transport-layer security. However, Thrift has since migrated fully to Transport Layer Security for its transport-layer security and no longer has any build-time dependency on `krb5`. Clean this up.
Reviewed By: stevegury, vitaut
Differential Revision: D14814205
fbshipit-source-id: dca469d22098e34573674194facaaac6c4c6aa32#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separate spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
<commit_before>#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
builder.add_option('krb5/krb5:git_hash', 'krb5-1.16.1-final')
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separate spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.github_project_workdir('krb5/krb5', 'src'),
builder.autoconf_install('krb5/krb5'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
<commit_msg>Cut fbcode_builder dep for thrift on krb5
Summary: [Thrift] Cut `fbcode_builder` dep for `thrift` on `krb5`. In the past, Thrift depended on Kerberos and the `krb5` implementation for its transport-layer security. However, Thrift has since migrated fully to Transport Layer Security for its transport-layer security and no longer has any build-time dependency on `krb5`. Clean this up.
Reviewed By: stevegury, vitaut
Differential Revision: D14814205
fbshipit-source-id: dca469d22098e34573674194facaaac6c4c6aa32<commit_after>#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separate spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
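The record above mainly trims the spec's dependency list; the shape that stays constant is a module-level fbcode_builder_spec(builder) returning depends_on plus ordered build steps. A schematic sketch of that shape, with placeholder project names rather than real targets:
def fbcode_builder_spec(builder):
    # depends_on lists other spec modules; steps run in order on the builder.
    return {
        'depends_on': [],
        'steps': [
            builder.github_project_workdir('example/project', 'build'),
            builder.cmake_install('example/project'),
        ],
    }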
65e041bd03863563b52496c1cec81a0c9425f4ee
|
geonamescache/mappers.py
|
geonamescache/mappers.py
|
# -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(key):
if 'name' == from_key and key in mappings.country_names:
key = mappings.country_names[key]
item = dataset.get(key)
if item:
return item[to_key]
return mapper
|
# -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
"""Creates and returns a mapper function to access country data.
The mapper function that is returned must be called with one argument. In
the default case you call it with a name and it returns a 3-letter
ISO_3166-1 code, e. g. called with ``Spain`` it would return ``ESP``.
:param from_key: (optional) the country attribute you give as input.
Defaults to ``name``.
:param to_key: (optional) the country attribute you want as output.
Defaults to ``iso``.
:return: mapper
:rtype: function
"""
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(input):
# For country name inputs take the names mapping into account.
if 'name' == from_key:
input = mappings.country_names.get(input, input)
# If there is a record return the demanded attribute.
item = dataset.get(input)
if item:
return item[to_key]
return mapper
|
Add documentation for country mapper
|
Add documentation for country mapper
|
Python
|
mit
|
yaph/geonamescache,yaph/geonamescache
|
# -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(key):
if 'name' == from_key and key in mappings.country_names:
key = mappings.country_names[key]
item = dataset.get(key)
if item:
return item[to_key]
return mapperAdd documentation for country mapper
|
# -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
"""Creates and returns a mapper function to access country data.
The mapper function that is returned must be called with one argument. In
the default case you call it with a name and it returns a 3-letter
ISO_3166-1 code, e. g. called with ``Spain`` it would return ``ESP``.
:param from_key: (optional) the country attribute you give as input.
Defaults to ``name``.
:param to_key: (optional) the country attribute you want as output.
Defaults to ``iso``.
:return: mapper
:rtype: function
"""
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(input):
# For country name inputs take the names mapping into account.
if 'name' == from_key:
input = mappings.country_names.get(input, input)
# If there is a record return the demanded attribute.
item = dataset.get(input)
if item:
return item[to_key]
return mapper
|
<commit_before># -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(key):
if 'name' == from_key and key in mappings.country_names:
key = mappings.country_names[key]
item = dataset.get(key)
if item:
return item[to_key]
return mapper<commit_msg>Add documentation for country mapper<commit_after>
|
# -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
"""Creates and returns a mapper function to access country data.
The mapper function that is returned must be called with one argument. In
the default case you call it with a name and it returns a 3-letter
ISO 3166-1 code, e.g. called with ``Spain`` it would return ``ESP``.
:param from_key: (optional) the country attribute you give as input.
Defaults to ``name``.
:param to_key: (optional) the country attribute you want as output.
Defaults to ``iso``.
:return: mapper
:rtype: function
"""
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(input):
# For country name inputs take the names mapping into account.
if 'name' == from_key:
input = mappings.country_names.get(input, input)
# If there is a record, return the requested attribute.
item = dataset.get(input)
if item:
return item[to_key]
return mapper
|
# -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(key):
if 'name' == from_key and key in mappings.country_names:
key = mappings.country_names[key]
item = dataset.get(key)
if item:
return item[to_key]
return mapper
Add documentation for country mapper
# -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
"""Creates and returns a mapper function to access country data.
The mapper function that is returned must be called with one argument. In
the default case you call it with a name and it returns a 3-letter
ISO 3166-1 code, e.g. called with ``Spain`` it would return ``ESP``.
:param from_key: (optional) the country attribute you give as input.
Defaults to ``name``.
:param to_key: (optional) the country attribute you want as output.
Defaults to ``iso``.
:return: mapper
:rtype: function
"""
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(input):
# For country name inputs take the names mapping into account.
if 'name' == from_key:
input = mappings.country_names.get(input, input)
# If there is a record, return the requested attribute.
item = dataset.get(input)
if item:
return item[to_key]
return mapper
|
<commit_before># -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(key):
if 'name' == from_key and key in mappings.country_names:
key = mappings.country_names[key]
item = dataset.get(key)
if item:
return item[to_key]
return mapper<commit_msg>Add documentation for country mapper<commit_after># -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
"""Creates and returns a mapper function to access country data.
The mapper function that is returned must be called with one argument. In
the default case you call it with a name and it returns a 3-letter
ISO 3166-1 code, e.g. called with ``Spain`` it would return ``ESP``.
:param from_key: (optional) the country attribute you give as input.
Defaults to ``name``.
:param to_key: (optional) the country attribute you want as output.
Defaults to ``iso``.
:return: mapper
:rtype: function
"""
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(input):
# For country name inputs take the names mapping into account.
if 'name' == from_key:
input = mappings.country_names.get(input, input)
# If there is a record, return the requested attribute.
item = dataset.get(input)
if item:
return item[to_key]
return mapper
|
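A minimal usage sketch for the country mapper documented in this record (illustrative only, not part of the commit; the 'iso3' key for three-letter codes is an assumption about geonamescache's country dataset):
from geonamescache.mappers import country
# Build a name -> three-letter-code mapper; keys passed explicitly for clarity.
name_to_iso3 = country(from_key='name', to_key='iso3')
print(name_to_iso3('Spain'))  # expected 'ESP', matching the docstring example
# Names missing from both mappings.country_names and the dataset yield None.
print(name_to_iso3('Atlantis'))  # None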
b34eaedad04c32252f8f2972c335635c6783ae79
|
evelink/__init__.py
|
evelink/__init__.py
|
"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
__version__ = "0.5.0"
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
# Update the version number used in the user-agent
api._user_agent = 'evelink v%s' % __version__
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
|
"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
__version__ = "0.5.1"
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
# Update the version number used in the user-agent
api._user_agent = 'evelink v%s' % __version__
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
|
Update version to 0.5.1 for Hyperion release
|
Update version to 0.5.1 for Hyperion release
|
Python
|
mit
|
bastianh/evelink,FashtimeDotCom/evelink,ayust/evelink,Morloth1274/EVE-Online-POCO-manager,zigdon/evelink
|
"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
__version__ = "0.5.0"
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
# Update the version number used in the user-agent
api._user_agent = 'evelink v%s' % __version__
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
Update version to 0.5.1 for Hyperion release
|
"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
__version__ = "0.5.1"
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
# Update the version number used in the user-agent
api._user_agent = 'evelink v%s' % __version__
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
|
<commit_before>"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
__version__ = "0.5.0"
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
# Update the version number used in the user-agent
api._user_agent = 'evelink v%s' % __version__
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
<commit_msg>Update version to 0.5.1 for Hyperion release<commit_after>
|
"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
__version__ = "0.5.1"
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
# Update the version number used in the user-agent
api._user_agent = 'evelink v%s' % __version__
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
|
"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
__version__ = "0.5.0"
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
# Update the version number used in the user-agent
api._user_agent = 'evelink v%s' % __version__
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
Update version to 0.5.1 for Hyperion release"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
__version__ = "0.5.1"
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
# Update the version number used in the user-agent
api._user_agent = 'evelink v%s' % __version__
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
|
<commit_before>"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
__version__ = "0.5.0"
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
# Update the version number used in the user-agent
api._user_agent = 'evelink v%s' % __version__
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
<commit_msg>Update version to 0.5.1 for Hyperion release<commit_after>"""EVELink - Python bindings for the EVE API."""
import logging
from evelink import account
from evelink import api
from evelink import char
from evelink import constants
from evelink import corp
from evelink import eve
from evelink import map
from evelink import server
__version__ = "0.5.1"
# Implement NullHandler because it was only added in Python 2.7+.
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Create a logger, but by default, have it do nothing
_log = logging.getLogger('evelink')
_log.addHandler(NullHandler())
# Update the version number used in the user-agent
api._user_agent = 'evelink v%s' % __version__
__all__ = [
"account",
"api",
"char",
"constants",
"corp",
"eve",
"map",
"parsing",
"server",
]
|
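A hedged sketch of how the logging setup in this module is typically consumed (assumed application code, not taken from the repository): because evelink attaches only a NullHandler, a caller must add a real handler to see the library's log output.
import logging
# Route evelink's log records somewhere visible; without this, the
# package-level NullHandler silently discards them.
log = logging.getLogger('evelink')
log.addHandler(logging.StreamHandler())
log.setLevel(logging.DEBUG)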
2814f5b2bbd2c53c165f13009eb85cb2c5030b57
|
chicago/search_indexes.py
|
chicago/search_indexes.py
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if obj.last_action_date:
now = app_timezone.localize(datetime.now())
# obj.last_action_date can be in the future
weeks_passed = (now - obj.last_action_date).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if data['last_action_date']:
today = app_timezone.localize(datetime.now()).date()
# data['last_action_date'] can be in the future
weeks_passed = (today - data['last_action_date']).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
Use prepared data, rather than the object last action date, to determine boost
|
Use prepared data, rather than the object last action date, to determine boost
|
Python
|
mit
|
datamade/chi-councilmatic,datamade/chi-councilmatic,datamade/chi-councilmatic,datamade/chi-councilmatic,datamade/chi-councilmatic
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if obj.last_action_date:
now = app_timezone.localize(datetime.now())
# obj.last_action_date can be in the future
weeks_passed = (now - obj.last_action_date).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
Use prepared data, rather than the object last action date, to determine boost
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if data['last_action_date']:
today = app_timezone.localize(datetime.now()).date()
# data['last_action_date'] can be in the future
weeks_passed = (today - data['last_action_date']).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
<commit_before>from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if obj.last_action_date:
now = app_timezone.localize(datetime.now())
# obj.last_action_date can be in the future
weeks_passed = (now - obj.last_action_date).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
<commit_msg>Use prepared data, rather than the object last action date, to determine boost<commit_after>
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if data['last_action_date']:
today = app_timezone.localize(datetime.now()).date()
# data['last_action_date'] can be in the future
weeks_passed = (today - data['last_action_date']).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if obj.last_action_date:
now = app_timezone.localize(datetime.now())
# obj.last_action_date can be in the future
weeks_passed = (now - obj.last_action_date).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
Use prepared data, rather than the object last action date, to determine boost
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if data['last_action_date']:
today = app_timezone.localize(datetime.now()).date()
# data['last_action_date'] can be in the future
weeks_passed = (today - data['last_action_date']).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
<commit_before>from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if obj.last_action_date:
now = app_timezone.localize(datetime.now())
# obj.last_action_date can be in the future
weeks_passed = (now - obj.last_action_date).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
<commit_msg>Use prepared data, rather than the object last action date, to determine boost<commit_after>from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if data['last_action_date']:
today = app_timezone.localize(datetime.now()).date()
# data['last_action_date'] can be in the future
weeks_passed = (today - data['last_action_date']).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
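A small worked example of the recency boost computed in prepare() above (the dates are hypothetical, chosen only to make the arithmetic concrete):
from datetime import date
today = date(2017, 6, 1)
last_action = date(2017, 5, 4)                      # 28 days, i.e. 4 weeks, earlier
weeks_passed = (today - last_action).days / 7 + 1   # 28 / 7 + 1 = 5.0
boost = 1 + 1.0 / max(weeks_passed, 1)              # 1 + 1/5 = 1.2; newer bills rank higher
print(boost)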
91a551c0bc29d09cd2f034741c1291bfad7346db
|
tensorflow/tools/docker/jupyter_notebook_config.py
|
tensorflow/tools/docker/jupyter_notebook_config.py
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
import os
from IPython.lib import passwd
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
|
Move imports to beginning of code
|
Move imports to beginning of code
|
Python
|
apache-2.0
|
AnishShah/tensorflow,dhalleine/tensorflow,LUTAN/tensorflow,chemelnucfin/tensorflow,brchiu/tensorflow,jalexvig/tensorflow,Intel-tensorflow/tensorflow,benoitsteiner/tensorflow-xsmm,alistairlow/tensorflow,ageron/tensorflow,sandeepdsouza93/TensorFlow-15712,RapidApplicationDevelopment/tensorflow,ravindrapanda/tensorflow,rdipietro/tensorflow,rdipietro/tensorflow,guschmue/tensorflow,xzturn/tensorflow,Carmezim/tensorflow,tomasreimers/tensorflow-emscripten,jhaux/tensorflow,paolodedios/tensorflow,alheinecke/tensorflow-xsmm,aselle/tensorflow,handroissuazo/tensorflow,DavidNorman/tensorflow,markslwong/tensorflow,chris-chris/tensorflow,pcm17/tensorflow,HaebinShin/tensorflow,odejesush/tensorflow,chenjun0210/tensorflow,lukeiwanski/tensorflow,zycdragonball/tensorflow,davidzchen/tensorflow,pavelchristof/gomoku-ai,drpngx/tensorflow,rdipietro/tensorflow,LUTAN/tensorflow,code-sauce/tensorflow,adit-chandra/tensorflow,markslwong/tensorflow,HKUST-SING/tensorflow,renyi533/tensorflow,rabipanda/tensorflow,horance-liu/tensorflow,gojira/tensorflow,ghchinoy/tensorflow,bowang/tensorflow,nikste/tensorflow,DCSaunders/tensorflow,yongtang/tensorflow,Mazecreator/tensorflow,davidzchen/tensorflow,johndpope/tensorflow,seanli9jan/tensorflow,kobejean/tensorflow,benoitsteiner/tensorflow-opencl,chemelnucfin/tensorflow,TakayukiSakai/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jart/tensorflow,zycdragonball/tensorflow,apark263/tensorflow,Carmezim/tensorflow,TakayukiSakai/tensorflow,tomasreimers/tensorflow-emscripten,paolodedios/tensorflow,pcm17/tensorflow,code-sauce/tensorflow,manazhao/tf_recsys,theflofly/tensorflow,gunan/tensorflow,manipopopo/tensorflow,Mazecreator/tensorflow,with-git/tensorflow,admcrae/tensorflow,admcrae/tensorflow,JVillella/tensorflow,gunan/tensorflow,markslwong/tensorflow,tiagofrepereira2012/tensorflow,maciekcc/tensorflow,ivano666/tensorflow,yongtang/tensorflow,tornadozou/tensorflow,davidzchen/tensorflow,nikste/tensorflow,alsrgv/tensorflow,mortada/tensorflow,Intel-Corporation/tensorflow,JingJunYin/tensorflow,ishay2b/tensorflow,karllessard/tensorflow,alisidd/tensorflow,drpngx/tensorflow,snnn/tensorflow,tntnatbry/tensorflow,JVillella/tensorflow,sandeepgupta2k4/tensorflow,ibmsoe/tensorflow,alisidd/tensorflow,cancan101/tensorflow,mengxn/tensorflow,ppries/tensorflow,dendisuhubdy/tensorflow,Kongsea/tensorflow,mdrumond/tensorflow,RapidApplicationDevelopment/tensorflow,dongjoon-hyun/tensorflow,Mazecreator/tensorflow,alsrgv/tensorflow,tornadozou/tensorflow,gautam1858/tensorflow,scenarios/tensorflow,mortada/tensorflow,meteorcloudy/tensorflow,EvenStrangest/tensorflow,frreiss/tensorflow-fred,vrv/tensorflow,benoitsteiner/tensorflow-xsmm,ivano666/tensorflow,girving/tensorflow,RapidApplicationDevelopment/tensorflow,unsiloai/syntaxnet-ops-hack,strint/tensorflow,tillahoffmann/tensorflow,sandeepgupta2k4/tensorflow,raymondxyang/tensorflow,abhitopia/tensorflow,Moriadry/tensorflow,hsaputra/tensorflow,Kongsea/tensorflow,brchiu/tensorflow,anand-c-goog/tensorflow,markslwong/tensorflow,caisq/tensorflow,manazhao/tf_recsys,maciekcc/tensorflow,caisq/tensorflow,arborh/tensorflow,krikru/tensorflow-opencl,laszlocsomor/tensorflow,handroissuazo/tensorflow,strint/tensorflow,sandeepgupta2k4/tensorflow,manipopopo/tensorflow,laosiaudi/tensorflow,vrv/tensorflow,ppwwyyxx/tensorflow,asadziach/tensorflow,Mazecreator/tensorflow,AnishShah/tensorflow,rabipanda/tensorflow,zasdfgbnm/tensorflow,memo/tensorflow,sandeepgupta2k4/tensorflow,asimshankar/tensorflow,kchodorow/tensorflow,neilhan/tensorflow,Intel-tensorflow/tensorflow,raymon
dxyang/tensorflow,neilhan/tensorflow,hfp/tensorflow-xsmm,dhalleine/tensorflow,ibmsoe/tensorflow,nolanliou/tensorflow,asadziach/tensorflow,suiyuan2009/tensorflow,karllessard/tensorflow,gunan/tensorflow,strint/tensorflow,eerwitt/tensorflow,naturali/tensorflow,yufengg/tensorflow,gojira/tensorflow,llhe/tensorflow,Xeralux/tensorflow,SnakeJenny/TensorFlow,neilhan/tensorflow,kevin-coder/tensorflow-fork,thesuperzapper/tensorflow,HaebinShin/tensorflow,MoamerEncsConcordiaCa/tensorflow,aldian/tensorflow,jart/tensorflow,caisq/tensorflow,naturali/tensorflow,naturali/tensorflow,dongjoon-hyun/tensorflow,memo/tensorflow,Mazecreator/tensorflow,ageron/tensorflow,ran5515/DeepDecision,Bulochkin/tensorflow_pack,tongwang01/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,admcrae/tensorflow,ravindrapanda/tensorflow,ppwwyyxx/tensorflow,EvenStrangest/tensorflow,snnn/tensorflow,strint/tensorflow,ravindrapanda/tensorflow,Xeralux/tensorflow,calebfoss/tensorflow,mixturemodel-flow/tensorflow,jalexvig/tensorflow,sandeepgupta2k4/tensorflow,tornadozou/tensorflow,allenlavoie/tensorflow,benoitsteiner/tensorflow-xsmm,nikste/tensorflow,manjunaths/tensorflow,chenjun0210/tensorflow,JVillella/tensorflow,tornadozou/tensorflow,ibab/tensorflow,dendisuhubdy/tensorflow,annarev/tensorflow,alsrgv/tensorflow,eaplatanios/tensorflow,HKUST-SING/tensorflow,jhaux/tensorflow,AndreasMadsen/tensorflow,jart/tensorflow,MoamerEncsConcordiaCa/tensorflow,aam-at/tensorflow,zasdfgbnm/tensorflow,anilmuthineni/tensorflow,odejesush/tensorflow,aam-at/tensorflow,MycChiu/tensorflow,tornadozou/tensorflow,gibiansky/tensorflow,raymondxyang/tensorflow,guschmue/tensorflow,benoitsteiner/tensorflow,thjashin/tensorflow,gautam1858/tensorflow,mengxn/tensorflow,DCSaunders/tensorflow,mrry/tensorflow,laszlocsomor/tensorflow,meteorcloudy/tensorflow,hsaputra/tensorflow,Bismarrck/tensorflow,girving/tensorflow,llhe/tensorflow,Moriadry/tensorflow,yongtang/tensorflow,lukeiwanski/tensorflow-opencl,abhitopia/tensorflow,rabipanda/tensorflow,paolodedios/tensorflow,seaotterman/tensorflow,davidzchen/tensorflow,petewarden/tensorflow_makefile,dancingdan/tensorflow,Moriadry/tensorflow,with-git/tensorflow,cg31/tensorflow,cg31/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jwlawson/tensorflow,eaplatanios/tensorflow,thjashin/tensorflow,xzturn/tensorflow,Intel-tensorflow/tensorflow,ArtsiomCh/tensorflow,XueqingLin/tensorflow,jeffzheng1/tensorflow,annarev/tensorflow,elingg/tensorflow,alivecor/tensorflow,nightjean/Deep-Learning,freedomtan/tensorflow,davidzchen/tensorflow,DCSaunders/tensorflow,HKUST-SING/tensorflow,johndpope/tensorflow,annarev/tensorflow,RapidApplicationDevelopment/tensorflow,anilmuthineni/tensorflow,Mistobaan/tensorflow,eerwitt/tensorflow,thesuperzapper/tensorflow,MycChiu/tensorflow,wangyum/tensorflow,alshedivat/tensorflow,ageron/tensorflow,jhaux/tensorflow,xzturn/tensorflow,scenarios/tensorflow,scenarios/tensorflow,xzturn/tensorflow,av8ramit/tensorflow,hehongliang/tensorflow,ninotoshi/tensorflow,guschmue/tensorflow,benoitsteiner/tensorflow,LUTAN/tensorflow,pcm17/tensorflow,krikru/tensorflow-opencl,tongwang01/tensorflow,cxxgtxy/tensorflow,manipopopo/tensorflow,gibiansky/tensorflow,jhseu/tensorflow,jwlawson/tensorflow,snnn/tensorflow,Xeralux/tensorflow,tongwang01/tensorflow,code-sauce/tensorflow,zasdfgbnm/tensorflow,nburn42/tensorflow,Mistobaan/tensorflow,TakayukiSakai/tensorflow,JingJunYin/tensorflow,dyoung418/tensorflow,alsrgv/tensorflow,xodus7/tensorflow,zasdfgbnm/tensorflow,nanditav/15712-TensorFlow,pierreg/tensorflow,dancingdan/tensorflow,zas
dfgbnm/tensorflow,wangyum/tensorflow,juharris/tensorflow,ArtsiomCh/tensorflow,bowang/tensorflow,asimshankar/tensorflow,alsrgv/tensorflow,taknevski/tensorflow-xsmm,memo/tensorflow,freedomtan/tensorflow,eadgarchen/tensorflow,nolanliou/tensorflow,eaplatanios/tensorflow,dendisuhubdy/tensorflow,mortada/tensorflow,Mazecreator/tensorflow,asimshankar/tensorflow,markslwong/tensorflow,gautam1858/tensorflow,laosiaudi/tensorflow,kamcpp/tensorflow,XueqingLin/tensorflow,jostep/tensorflow,mrry/tensorflow,tntnatbry/tensorflow,ppries/tensorflow,xzturn/tensorflow,ville-k/tensorflow,yongtang/tensorflow,RapidApplicationDevelopment/tensorflow,davidzchen/tensorflow,chenjun0210/tensorflow,aselle/tensorflow,neilhan/tensorflow,jhaux/tensorflow,allenlavoie/tensorflow,dhalleine/tensorflow,XueqingLin/tensorflow,ivano666/tensorflow,johndpope/tensorflow,whn09/tensorflow,tillahoffmann/tensorflow,alsrgv/tensorflow,chris-chris/tensorflow,alshedivat/tensorflow,freedomtan/tensorflow,eerwitt/tensorflow,lakshayg/tensorflow,mixturemodel-flow/tensorflow,renyi533/tensorflow,nolanliou/tensorflow,snnn/tensorflow,annarev/tensorflow,ArtsiomCh/tensorflow,a-doumoulakis/tensorflow,Xeralux/tensorflow,drpngx/tensorflow,thjashin/tensorflow,MostafaGazar/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,maciekcc/tensorflow,thjashin/tensorflow,mavenlin/tensorflow,caisq/tensorflow,ArtsiomCh/tensorflow,meteorcloudy/tensorflow,Intel-Corporation/tensorflow,gojira/tensorflow,odejesush/tensorflow,lukeiwanski/tensorflow-opencl,kobejean/tensorflow,pavelchristof/gomoku-ai,alshedivat/tensorflow,gnieboer/tensorflow,yanchen036/tensorflow,vrv/tensorflow,ibab/tensorflow,ibmsoe/tensorflow,andrewcmyers/tensorflow,juharris/tensorflow,maciekcc/tensorflow,yaroslavvb/tensorflow,SnakeJenny/TensorFlow,ghchinoy/tensorflow,dhalleine/tensorflow,tongwang01/tensorflow,chenjun0210/tensorflow,aam-at/tensorflow,benoitsteiner/tensorflow,sjperkins/tensorflow,Moriadry/tensorflow,martinwicke/tensorflow,yanchen036/tensorflow,xodus7/tensorflow,SnakeJenny/TensorFlow,bowang/tensorflow,jostep/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,kamcpp/tensorflow,Kongsea/tensorflow,girving/tensorflow,tiagofrepereira2012/tensorflow,gibiansky/tensorflow,manjunaths/tensorflow,jhseu/tensorflow,benoitsteiner/tensorflow-opencl,MostafaGazar/tensorflow,rabipanda/tensorflow,lakshayg/tensorflow,meteorcloudy/tensorflow,LUTAN/tensorflow,cancan101/tensorflow,bowang/tensorflow,mdrumond/tensorflow,tornadozou/tensorflow,thjashin/tensorflow,lukeiwanski/tensorflow,mrry/tensorflow,handroissuazo/tensorflow,zasdfgbnm/tensorflow,jalexvig/tensorflow,pavelchristof/gomoku-ai,Mistobaan/tensorflow,Bismarrck/tensorflow,ibmsoe/tensorflow,tntnatbry/tensorflow,petewarden/tensorflow,manipopopo/tensorflow,johndpope/tensorflow,arborh/tensorflow,ville-k/tensorflow,ville-k/tensorflow,rdipietro/tensorflow,ppwwyyxx/tensorflow,ychfan/tensorflow,hfp/tensorflow-xsmm,pcm17/tensorflow,DavidNorman/tensorflow,seaotterman/tensorflow,code-sauce/tensorflow,jbedorf/tensorflow,lukeiwanski/tensorflow-opencl,tomasreimers/tensorflow-emscripten,lakshayg/tensorflow,aldian/tensorflow,frreiss/tensorflow-fred,Bulochkin/tensorflow_pack,karllessard/tensorflow,horance-liu/tensorflow,frreiss/tensorflow-fred,RapidApplicationDevelopment/tensorflow,SnakeJenny/TensorFlow,markslwong/tensorflow,sarvex/tensorflow,aldian/tensorflow,adamtiger/tensorflow,manazhao/tf_recsys,yaroslavvb/tensorflow,eaplatanios/tensorflow,naturali/tensorflow,Kongsea/tensorflow,ppwwyyxx/tensorflow,ishay2b/tensorflow,yongtang/tensorflow,chenjun0210/tensorflow,gautam1858/te
nsorflow,pierreg/tensorflow,asimshankar/tensorflow,hsaputra/tensorflow,cxxgtxy/tensorflow,dancingdan/tensorflow,DCSaunders/tensorflow,jwlawson/tensorflow,frreiss/tensorflow-fred,gunan/tensorflow,gnieboer/tensorflow,suiyuan2009/tensorflow,andrewcmyers/tensorflow,abhitopia/tensorflow,alistairlow/tensorflow,manipopopo/tensorflow,raymondxyang/tensorflow,AnishShah/tensorflow,anilmuthineni/tensorflow,mengxn/tensorflow,snnn/tensorflow,av8ramit/tensorflow,Intel-tensorflow/tensorflow,mortada/tensorflow,arborh/tensorflow,jhseu/tensorflow,ninotoshi/tensorflow,HaebinShin/tensorflow,DavidNorman/tensorflow,laszlocsomor/tensorflow,nburn42/tensorflow,alistairlow/tensorflow,anilmuthineni/tensorflow,Intel-tensorflow/tensorflow,LUTAN/tensorflow,dongjoon-hyun/tensorflow,yaroslavvb/tensorflow,aselle/tensorflow,jart/tensorflow,mixturemodel-flow/tensorflow,mdrumond/tensorflow,renyi533/tensorflow,chris-chris/tensorflow,apark263/tensorflow,johndpope/tensorflow,cancan101/tensorflow,Bulochkin/tensorflow_pack,tensorflow/tensorflow,benoitsteiner/tensorflow,asadziach/tensorflow,Mazecreator/tensorflow,dendisuhubdy/tensorflow,rabipanda/tensorflow,Bulochkin/tensorflow_pack,ghchinoy/tensorflow,laosiaudi/tensorflow,eerwitt/tensorflow,alivecor/tensorflow,jendap/tensorflow,manjunaths/tensorflow,meteorcloudy/tensorflow,lukeiwanski/tensorflow-opencl,raymondxyang/tensorflow,allenlavoie/tensorflow,tongwang01/tensorflow,nolanliou/tensorflow,mavenlin/tensorflow,kchodorow/tensorflow,Intel-tensorflow/tensorflow,pierreg/tensorflow,wangyum/tensorflow,anand-c-goog/tensorflow,Mazecreator/tensorflow,mixturemodel-flow/tensorflow,ZhangXinNan/tensorflow,ZhangXinNan/tensorflow,sarvex/tensorflow,Carmezim/tensorflow,JingJunYin/tensorflow,ibmsoe/tensorflow,Bulochkin/tensorflow_pack,jalexvig/tensorflow,Mistobaan/tensorflow,anilmuthineni/tensorflow,EvenStrangest/tensorflow,sjperkins/tensorflow,paolodedios/tensorflow,admcrae/tensorflow,manipopopo/tensorflow,jart/tensorflow,sarvex/tensorflow,gunan/tensorflow,eerwitt/tensorflow,anand-c-goog/tensorflow,tntnatbry/tensorflow,nburn42/tensorflow,lukeiwanski/tensorflow,tomasreimers/tensorflow-emscripten,petewarden/tensorflow,whn09/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,chris-chris/tensorflow,bowang/tensorflow,nolanliou/tensorflow,theflofly/tensorflow,jendap/tensorflow,jalexvig/tensorflow,renyi533/tensorflow,calebfoss/tensorflow,alheinecke/tensorflow-xsmm,renyi533/tensorflow,martinwicke/tensorflow,ibab/tensorflow,yaroslavvb/tensorflow,manazhao/tf_recsys,mdrumond/tensorflow,nanditav/15712-TensorFlow,ibab/tensorflow,handroissuazo/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,mortada/tensorflow,dyoung418/tensorflow,freedomtan/tensorflow,chemelnucfin/tensorflow,mdrumond/tensorflow,drpngx/tensorflow,MycChiu/tensorflow,benoitsteiner/tensorflow-opencl,annarev/tensorflow,LUTAN/tensorflow,rdipietro/tensorflow,codrut3/tensorflow,jeffzheng1/tensorflow,XueqingLin/tensorflow,DavidNorman/tensorflow,Kongsea/tensorflow,av8ramit/tensorflow,tomasreimers/tensorflow-emscripten,memo/tensorflow,tensorflow/tensorflow,gojira/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ninotoshi/tensorflow,taknevski/tensorflow-xsmm,Bulochkin/tensorflow_pack,dongjoon-hyun/tensorflow,Intel-Corporation/tensorflow,kamcpp/tensorflow,ageron/tensorflow,xodus7/tensorflow,andrewcmyers/tensorflow,odejesush/tensorflow,tillahoffmann/tensorflow,xodus7/tensorflow,petewarden/tensorflow_makefile,cg31/tensorflow,mengxn/tensorflow,hsaputra/tensorflow,MostafaGazar/tensorflow,odejesush/tensorflow,Bismarrck/tenso
rflow,codrut3/tensorflow,tornadozou/tensorflow,alheinecke/tensorflow-xsmm,zasdfgbnm/tensorflow,jwlawson/tensorflow,kobejean/tensorflow,snnn/tensorflow,anand-c-goog/tensorflow,whn09/tensorflow,aselle/tensorflow,with-git/tensorflow,mortada/tensorflow,girving/tensorflow,Carmezim/tensorflow,aldian/tensorflow,jart/tensorflow,raymondxyang/tensorflow,lukeiwanski/tensorflow,a-doumoulakis/tensorflow,alshedivat/tensorflow,mavenlin/tensorflow,neilhan/tensorflow,Bismarrck/tensorflow,ageron/tensorflow,handroissuazo/tensorflow,meteorcloudy/tensorflow,apark263/tensorflow,girving/tensorflow,paolodedios/tensorflow,sandeepdsouza93/TensorFlow-15712,JingJunYin/tensorflow,petewarden/tensorflow,tiagofrepereira2012/tensorflow,drpngx/tensorflow,Carmezim/tensorflow,jeffzheng1/tensorflow,TakayukiSakai/tensorflow,tensorflow/tensorflow,nburn42/tensorflow,theflofly/tensorflow,alisidd/tensorflow,sjperkins/tensorflow,gnieboer/tensorflow,av8ramit/tensorflow,mdrumond/tensorflow,karllessard/tensorflow,kchodorow/tensorflow,scenarios/tensorflow,aldian/tensorflow,dyoung418/tensorflow,strint/tensorflow,unsiloai/syntaxnet-ops-hack,AnishShah/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ZhangXinNan/tensorflow,anand-c-goog/tensorflow,jbedorf/tensorflow,jostep/tensorflow,ppwwyyxx/tensorflow,adamtiger/tensorflow,chris-chris/tensorflow,petewarden/tensorflow,jendap/tensorflow,handroissuazo/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,unsiloai/syntaxnet-ops-hack,caisq/tensorflow,asimshankar/tensorflow,xodus7/tensorflow,juharris/tensorflow,Kongsea/tensorflow,bowang/tensorflow,alisidd/tensorflow,asimshankar/tensorflow,ishay2b/tensorflow,gunan/tensorflow,DCSaunders/tensorflow,ran5515/DeepDecision,abhitopia/tensorflow,jeffzheng1/tensorflow,handroissuazo/tensorflow,yufengg/tensorflow,krikru/tensorflow-opencl,ZhangXinNan/tensorflow,snnn/tensorflow,jendap/tensorflow,whn09/tensorflow,tntnatbry/tensorflow,ninotoshi/tensorflow,cg31/tensorflow,EvenStrangest/tensorflow,jhseu/tensorflow,XueqingLin/tensorflow,allenlavoie/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ishay2b/tensorflow,laszlocsomor/tensorflow,nburn42/tensorflow,jart/tensorflow,theflofly/tensorflow,dancingdan/tensorflow,cxxgtxy/tensorflow,haeusser/tensorflow,tensorflow/tensorflow,mixturemodel-flow/tensorflow,meteorcloudy/tensorflow,jart/tensorflow,meteorcloudy/tensorflow,tillahoffmann/tensorflow,jhaux/tensorflow,Carmezim/tensorflow,tiagofrepereira2012/tensorflow,frreiss/tensorflow-fred,kobejean/tensorflow,lukeiwanski/tensorflow,petewarden/tensorflow,alshedivat/tensorflow,tiagofrepereira2012/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jendap/tensorflow,tillahoffmann/tensorflow,a-doumoulakis/tensorflow,xodus7/tensorflow,gautam1858/tensorflow,alsrgv/tensorflow,dancingdan/tensorflow,apark263/tensorflow,sandeepdsouza93/TensorFlow-15712,zycdragonball/tensorflow,Bulochkin/tensorflow_pack,unsiloai/syntaxnet-ops-hack,chris-chris/tensorflow,renyi533/tensorflow,mortada/tensorflow,jhseu/tensorflow,ville-k/tensorflow,taknevski/tensorflow-xsmm,tensorflow/tensorflow-experimental_link_static_libraries_once,asimshankar/tensorflow,krikru/tensorflow-opencl,chenjun0210/tensorflow,Carmezim/tensorflow,yaroslavvb/tensorflow,benoitsteiner/tensorflow-xsmm,alivecor/tensorflow,alisidd/tensorflow,theflofly/tensorflow,tiagofrepereira2012/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,adamtiger/tensorflow,dancingdan/tensorflow,MoamerEncsConcordiaCa/tensorflow,kamcpp/tensorflow,pavelchristof/gomoku-ai,anand-c-goog/tensorflow,jostep/
tensorflow,tensorflow/tensorflow-pywrap_saved_model,kevin-coder/tensorflow-fork,yanchen036/tensorflow,chemelnucfin/tensorflow,nanditav/15712-TensorFlow,Xeralux/tensorflow,aldian/tensorflow,seanli9jan/tensorflow,sjperkins/tensorflow,XueqingLin/tensorflow,adit-chandra/tensorflow,renyi533/tensorflow,TakayukiSakai/tensorflow,MostafaGazar/tensorflow,freedomtan/tensorflow,xzturn/tensorflow,mavenlin/tensorflow,nightjean/Deep-Learning,hehongliang/tensorflow,haeusser/tensorflow,alshedivat/tensorflow,SnakeJenny/TensorFlow,Bismarrck/tensorflow,sandeepdsouza93/TensorFlow-15712,adit-chandra/tensorflow,petewarden/tensorflow_makefile,codrut3/tensorflow,llhe/tensorflow,MoamerEncsConcordiaCa/tensorflow,seaotterman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,eadgarchen/tensorflow,allenlavoie/tensorflow,a-doumoulakis/tensorflow,cxxgtxy/tensorflow,ville-k/tensorflow,annarev/tensorflow,anilmuthineni/tensorflow,pcm17/tensorflow,raymondxyang/tensorflow,haeusser/tensorflow,Bulochkin/tensorflow_pack,jalexvig/tensorflow,alheinecke/tensorflow-xsmm,Intel-Corporation/tensorflow,kamcpp/tensorflow,aselle/tensorflow,benoitsteiner/tensorflow-xsmm,AndreasMadsen/tensorflow,haeusser/tensorflow,kobejean/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,AndreasMadsen/tensorflow,chemelnucfin/tensorflow,wangyum/tensorflow,nburn42/tensorflow,nburn42/tensorflow,tensorflow/tensorflow-pywrap_saved_model,code-sauce/tensorflow,alshedivat/tensorflow,jbedorf/tensorflow,taknevski/tensorflow-xsmm,brchiu/tensorflow,memo/tensorflow,asadziach/tensorflow,manazhao/tf_recsys,Xeralux/tensorflow,taknevski/tensorflow-xsmm,laosiaudi/tensorflow,AndreasMadsen/tensorflow,codrut3/tensorflow,kobejean/tensorflow,rabipanda/tensorflow,mrry/tensorflow,tntnatbry/tensorflow,sandeepgupta2k4/tensorflow,gautam1858/tensorflow,eaplatanios/tensorflow,tomasreimers/tensorflow-emscripten,AndreasMadsen/tensorflow,petewarden/tensorflow,brchiu/tensorflow,MycChiu/tensorflow,aldian/tensorflow,gunan/tensorflow,horance-liu/tensorflow,kamcpp/tensorflow,jhseu/tensorflow,asimshankar/tensorflow,vrv/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,johndpope/tensorflow,dancingdan/tensorflow,gnieboer/tensorflow,chenjun0210/tensorflow,kchodorow/tensorflow,taknevski/tensorflow-xsmm,guschmue/tensorflow,raymondxyang/tensorflow,Mistobaan/tensorflow,aam-at/tensorflow,ppwwyyxx/tensorflow,benoitsteiner/tensorflow,scenarios/tensorflow,tntnatbry/tensorflow,hfp/tensorflow-xsmm,manipopopo/tensorflow,anand-c-goog/tensorflow,adit-chandra/tensorflow,alistairlow/tensorflow,sandeepgupta2k4/tensorflow,asadziach/tensorflow,MostafaGazar/tensorflow,petewarden/tensorflow,frreiss/tensorflow-fred,freedomtan/tensorflow,krikru/tensorflow-opencl,alshedivat/tensorflow,MostafaGazar/tensorflow,hehongliang/tensorflow,llhe/tensorflow,pierreg/tensorflow,eaplatanios/tensorflow,dyoung418/tensorflow,alshedivat/tensorflow,hsaputra/tensorflow,thesuperzapper/tensorflow,aam-at/tensorflow,Bismarrck/tensorflow,tillahoffmann/tensorflow,rdipietro/tensorflow,nanditav/15712-TensorFlow,aam-at/tensorflow,ychfan/tensorflow,benoitsteiner/tensorflow-opencl,ppries/tensorflow,annarev/tensorflow,nanditav/15712-TensorFlow,brchiu/tensorflow,paolodedios/tensorflow,hsaputra/tensorflow,ravindrapanda/tensorflow,ZhangXinNan/tensorflow,dongjoon-hyun/tensorflow,pierreg/tensorflow,calebfoss/tensorflow,girving/tensorflow,AnishShah/tensorflow,davidzchen/tensorflow,jbedorf/tensorflow,dhalleine/tensorflow,seanli9jan/tensorflow,theflofly/tensorflow,ZhangXinNan/tensorflow,pcm17/tensorflow,JingJunYin/tensorflow,Artsi
omCh/tensorflow,Mistobaan/tensorflow,jalexvig/tensorflow,laszlocsomor/tensorflow,mrry/tensorflow,llhe/tensorflow,guschmue/tensorflow,yaroslavvb/tensorflow,with-git/tensorflow,ychfan/tensorflow,nightjean/Deep-Learning,thjashin/tensorflow,adit-chandra/tensorflow,drpngx/tensorflow,andrewcmyers/tensorflow,aam-at/tensorflow,thesuperzapper/tensorflow,nburn42/tensorflow,odejesush/tensorflow,mengxn/tensorflow,ppries/tensorflow,HKUST-SING/tensorflow,lakshayg/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,hfp/tensorflow-xsmm,zasdfgbnm/tensorflow,allenlavoie/tensorflow,sarvex/tensorflow,codrut3/tensorflow,RapidApplicationDevelopment/tensorflow,laosiaudi/tensorflow,hsaputra/tensorflow,adit-chandra/tensorflow,cancan101/tensorflow,freedomtan/tensorflow,jart/tensorflow,alheinecke/tensorflow-xsmm,snnn/tensorflow,ageron/tensorflow,jwlawson/tensorflow,nikste/tensorflow,cancan101/tensorflow,benoitsteiner/tensorflow-xsmm,abhitopia/tensorflow,martinwicke/tensorflow,tntnatbry/tensorflow,abhitopia/tensorflow,eerwitt/tensorflow,ychfan/tensorflow,mengxn/tensorflow,MoamerEncsConcordiaCa/tensorflow,rabipanda/tensorflow,benoitsteiner/tensorflow-xsmm,naturali/tensorflow,davidzchen/tensorflow,laszlocsomor/tensorflow,benoitsteiner/tensorflow-xsmm,ychfan/tensorflow,manjunaths/tensorflow,allenlavoie/tensorflow,petewarden/tensorflow_makefile,lukeiwanski/tensorflow,admcrae/tensorflow,alheinecke/tensorflow-xsmm,RapidApplicationDevelopment/tensorflow,hehongliang/tensorflow,TakayukiSakai/tensorflow,dyoung418/tensorflow,hfp/tensorflow-xsmm,ZhangXinNan/tensorflow,JingJunYin/tensorflow,renyi533/tensorflow,asadziach/tensorflow,HaebinShin/tensorflow,code-sauce/tensorflow,guschmue/tensorflow,horance-liu/tensorflow,zycdragonball/tensorflow,Xeralux/tensorflow,AnishShah/tensorflow,suiyuan2009/tensorflow,eadgarchen/tensorflow,yanchen036/tensorflow,ghchinoy/tensorflow,krikru/tensorflow-opencl,petewarden/tensorflow,mdrumond/tensorflow,mortada/tensorflow,EvenStrangest/tensorflow,alsrgv/tensorflow,kchodorow/tensorflow,XueqingLin/tensorflow,jendap/tensorflow,lukeiwanski/tensorflow,seaotterman/tensorflow,annarev/tensorflow,DCSaunders/tensorflow,dongjoon-hyun/tensorflow,eadgarchen/tensorflow,thesuperzapper/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jendap/tensorflow,DavidNorman/tensorflow,whn09/tensorflow,EvenStrangest/tensorflow,elingg/tensorflow,markslwong/tensorflow,tensorflow/tensorflow,martinwicke/tensorflow,llhe/tensorflow,laosiaudi/tensorflow,martinwicke/tensorflow,freedomtan/tensorflow,elingg/tensorflow,ishay2b/tensorflow,naturali/tensorflow,cancan101/tensorflow,johndpope/tensorflow,MoamerEncsConcordiaCa/tensorflow,Bismarrck/tensorflow,chris-chris/tensorflow,sarvex/tensorflow,ppries/tensorflow,chemelnucfin/tensorflow,Intel-Corporation/tensorflow,ageron/tensorflow,benoitsteiner/tensorflow-opencl,asadziach/tensorflow,jwlawson/tensorflow,cg31/tensorflow,adit-chandra/tensorflow,jeffzheng1/tensorflow,dyoung418/tensorflow,alheinecke/tensorflow-xsmm,whn09/tensorflow,AndreasMadsen/tensorflow,aselle/tensorflow,whn09/tensorflow,juharris/tensorflow,nikste/tensorflow,Carmezim/tensorflow,nburn42/tensorflow,odejesush/tensorflow,mrry/tensorflow,girving/tensorflow,JingJunYin/tensorflow,codrut3/tensorflow,EvenStrangest/tensorflow,Bismarrck/tensorflow,apark263/tensorflow,caisq/tensorflow,Bismarrck/tensorflow,paolodedios/tensorflow,caisq/tensorflow,with-git/tensorflow,eerwitt/tensorflow,Xeralux/tensorflow,jbedorf/tensorflow,karllessard/tensorflow,theflofly/tensorflow,gojira/tensorflow,ivano666/tensorflow,mdrumond/tensorf
low,adit-chandra/tensorflow,juharris/tensorflow,jbedorf/tensorflow,taknevski/tensorflow-xsmm,alisidd/tensorflow,pierreg/tensorflow,kobejean/tensorflow,gibiansky/tensorflow,HKUST-SING/tensorflow,sarvex/tensorflow,nolanliou/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alisidd/tensorflow,neilhan/tensorflow,nightjean/Deep-Learning,manazhao/tf_recsys,tensorflow/tensorflow-pywrap_saved_model,tiagofrepereira2012/tensorflow,pcm17/tensorflow,asadziach/tensorflow,benoitsteiner/tensorflow,arborh/tensorflow,sarvex/tensorflow,benoitsteiner/tensorflow,memo/tensorflow,AnishShah/tensorflow,jwlawson/tensorflow,jalexvig/tensorflow,martinwicke/tensorflow,tensorflow/tensorflow-pywrap_saved_model,pavelchristof/gomoku-ai,code-sauce/tensorflow,jostep/tensorflow,davidzchen/tensorflow,krikru/tensorflow-opencl,yongtang/tensorflow,manjunaths/tensorflow,av8ramit/tensorflow,gibiansky/tensorflow,ravindrapanda/tensorflow,codrut3/tensorflow,maciekcc/tensorflow,pavelchristof/gomoku-ai,anilmuthineni/tensorflow,hsaputra/tensorflow,ghchinoy/tensorflow,gojira/tensorflow,sandeepgupta2k4/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,apark263/tensorflow,laszlocsomor/tensorflow,ppries/tensorflow,abhitopia/tensorflow,zasdfgbnm/tensorflow,tensorflow/tensorflow-pywrap_saved_model,manipopopo/tensorflow,benoitsteiner/tensorflow-xsmm,alistairlow/tensorflow,Carmezim/tensorflow,rabipanda/tensorflow,sjperkins/tensorflow,eaplatanios/tensorflow,ran5515/DeepDecision,arborh/tensorflow,petewarden/tensorflow,guschmue/tensorflow,codrut3/tensorflow,ageron/tensorflow,alheinecke/tensorflow-xsmm,kevin-coder/tensorflow-fork,lukeiwanski/tensorflow-opencl,MycChiu/tensorflow,strint/tensorflow,nightjean/Deep-Learning,apark263/tensorflow,karllessard/tensorflow,jwlawson/tensorflow,cxxgtxy/tensorflow,vrv/tensorflow,sandeepdsouza93/TensorFlow-15712,seaotterman/tensorflow,tensorflow/tensorflow,ravindrapanda/tensorflow,tensorflow/tensorflow,unsiloai/syntaxnet-ops-hack,zasdfgbnm/tensorflow,mengxn/tensorflow,alistairlow/tensorflow,gojira/tensorflow,jwlawson/tensorflow,XueqingLin/tensorflow,rdipietro/tensorflow,hfp/tensorflow-xsmm,frreiss/tensorflow-fred,karllessard/tensorflow,code-sauce/tensorflow,Kongsea/tensorflow,juharris/tensorflow,SnakeJenny/TensorFlow,mavenlin/tensorflow,Intel-Corporation/tensorflow,a-doumoulakis/tensorflow,chemelnucfin/tensorflow,davidzchen/tensorflow,gojira/tensorflow,arborh/tensorflow,aselle/tensorflow,rdipietro/tensorflow,Intel-Corporation/tensorflow,kevin-coder/tensorflow-fork,nolanliou/tensorflow,dendisuhubdy/tensorflow,codrut3/tensorflow,ravindrapanda/tensorflow,gibiansky/tensorflow,Intel-Corporation/tensorflow,dhalleine/tensorflow,frreiss/tensorflow-fred,mixturemodel-flow/tensorflow,jalexvig/tensorflow,adit-chandra/tensorflow,haeusser/tensorflow,bowang/tensorflow,RapidApplicationDevelopment/tensorflow,HKUST-SING/tensorflow,alivecor/tensorflow,ppwwyyxx/tensorflow,MostafaGazar/tensorflow,cxxgtxy/tensorflow,vrv/tensorflow,theflofly/tensorflow,ninotoshi/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow,haeusser/tensorflow,benoitsteiner/tensorflow-opencl,petewarden/tensorflow_makefile,vrv/tensorflow,xzturn/tensorflow,ppwwyyxx/tensorflow,jeffzheng1/tensorflow,dancingdan/tensorflow,pcm17/tensorflow,ville-k/tensorflow,dendisuhubdy/tensorflow,AnishShah/tensorflow,markslwong/tensorflow,thjashin/tensorflow,apark263/tensorflow,ZhangXinNan/tensorflow,dhalleine/tensorflow,yaroslavvb/tensorflow,DavidNorman/tensorflow,alheinecke/tensorflow-xsmm,alshedivat/tensorflow,AndreasMadsen/tensorflow,ppries/ten
sorflow,sjperkins/tensorflow,brchiu/tensorflow,nanditav/15712-TensorFlow,yufengg/tensorflow,laosiaudi/tensorflow,vrv/tensorflow,Mistobaan/tensorflow,ychfan/tensorflow,ville-k/tensorflow,Mazecreator/tensorflow,hfp/tensorflow-xsmm,eaplatanios/tensorflow,guschmue/tensorflow,chemelnucfin/tensorflow,martinwicke/tensorflow,ran5515/DeepDecision,ville-k/tensorflow,adamtiger/tensorflow,dhalleine/tensorflow,ghchinoy/tensorflow,rabipanda/tensorflow,wangyum/tensorflow,Intel-tensorflow/tensorflow,with-git/tensorflow,nanditav/15712-TensorFlow,MostafaGazar/tensorflow,cg31/tensorflow,freedomtan/tensorflow,AnishShah/tensorflow,tomasreimers/tensorflow-emscripten,jbedorf/tensorflow,ppwwyyxx/tensorflow,seaotterman/tensorflow,alsrgv/tensorflow,xodus7/tensorflow,jendap/tensorflow,ivano666/tensorflow,kchodorow/tensorflow,andrewcmyers/tensorflow,suiyuan2009/tensorflow,SnakeJenny/TensorFlow,pavelchristof/gomoku-ai,gautam1858/tensorflow,ghchinoy/tensorflow,jart/tensorflow,lakshayg/tensorflow,benoitsteiner/tensorflow-xsmm,johndpope/tensorflow,sandeepdsouza93/TensorFlow-15712,xzturn/tensorflow,MoamerEncsConcordiaCa/tensorflow,snnn/tensorflow,xzturn/tensorflow,jwlawson/tensorflow,jostep/tensorflow,memo/tensorflow,mrry/tensorflow,caisq/tensorflow,kamcpp/tensorflow,thesuperzapper/tensorflow,strint/tensorflow,llhe/tensorflow,LUTAN/tensorflow,ibab/tensorflow,HaebinShin/tensorflow,frreiss/tensorflow-fred,mengxn/tensorflow,mengxn/tensorflow,sandeepgupta2k4/tensorflow,tensorflow/tensorflow-pywrap_saved_model,arborh/tensorflow,allenlavoie/tensorflow,johndpope/tensorflow,annarev/tensorflow,jbedorf/tensorflow,DavidNorman/tensorflow,nikste/tensorflow,ishay2b/tensorflow,JingJunYin/tensorflow,HaebinShin/tensorflow,rabipanda/tensorflow,MostafaGazar/tensorflow,alivecor/tensorflow,krikru/tensorflow-opencl,sandeepdsouza93/TensorFlow-15712,chemelnucfin/tensorflow,kamcpp/tensorflow,ageron/tensorflow,chenjun0210/tensorflow,a-doumoulakis/tensorflow,llhe/tensorflow,TakayukiSakai/tensorflow,kobejean/tensorflow,hfp/tensorflow-xsmm,cg31/tensorflow,caisq/tensorflow,hehongliang/tensorflow,lukeiwanski/tensorflow-opencl,calebfoss/tensorflow,rabipanda/tensorflow,renyi533/tensorflow,nburn42/tensorflow,tillahoffmann/tensorflow,Bismarrck/tensorflow,JVillella/tensorflow,mrry/tensorflow,dancingdan/tensorflow,alistairlow/tensorflow,aam-at/tensorflow,asimshankar/tensorflow,jhseu/tensorflow,ppwwyyxx/tensorflow,cg31/tensorflow,jhseu/tensorflow,Moriadry/tensorflow,drpngx/tensorflow,caisq/tensorflow,apark263/tensorflow,eaplatanios/tensorflow,guschmue/tensorflow,yufengg/tensorflow,ville-k/tensorflow,av8ramit/tensorflow,petewarden/tensorflow,arborh/tensorflow,gnieboer/tensorflow,arborh/tensorflow,eadgarchen/tensorflow,tntnatbry/tensorflow,dongjoon-hyun/tensorflow,jbedorf/tensorflow,nanditav/15712-TensorFlow,xzturn/tensorflow,gojira/tensorflow,anilmuthineni/tensorflow,andrewcmyers/tensorflow,asadziach/tensorflow,theflofly/tensorflow,drpngx/tensorflow,alistairlow/tensorflow,ZhangXinNan/tensorflow,adit-chandra/tensorflow,maciekcc/tensorflow,yanchen036/tensorflow,jeffzheng1/tensorflow,ychfan/tensorflow,HaebinShin/tensorflow,xodus7/tensorflow,Mistobaan/tensorflow,memo/tensorflow,martinwicke/tensorflow,tensorflow/tensorflow,ArtsiomCh/tensorflow,codrut3/tensorflow,thjashin/tensorflow,jhaux/tensorflow,eerwitt/tensorflow,seanli9jan/tensorflow,jbedorf/tensorflow,frreiss/tensorflow-fred,alisidd/tensorflow,nightjean/Deep-Learning,gibiansky/tensorflow,AnishShah/tensorflow,renyi533/tensorflow,manipopopo/tensorflow,krikru/tensorflow-opencl,mavenlin/tensorflow,calebfoss/tensor
flow,andrewcmyers/tensorflow,mrry/tensorflow,seanli9jan/tensorflow,eadgarchen/tensorflow,karllessard/tensorflow,tomasreimers/tensorflow-emscripten,paolodedios/tensorflow,unsiloai/syntaxnet-ops-hack,ishay2b/tensorflow,girving/tensorflow,ageron/tensorflow,manjunaths/tensorflow,xodus7/tensorflow,snnn/tensorflow,admcrae/tensorflow,gunan/tensorflow,MoamerEncsConcordiaCa/tensorflow,wangyum/tensorflow,whn09/tensorflow,ibab/tensorflow,benoitsteiner/tensorflow,strint/tensorflow,rdipietro/tensorflow,aam-at/tensorflow,elingg/tensorflow,JVillella/tensorflow,asimshankar/tensorflow,jhaux/tensorflow,allenlavoie/tensorflow,Bismarrck/tensorflow,ibmsoe/tensorflow,karllessard/tensorflow,alistairlow/tensorflow,alivecor/tensorflow,seanli9jan/tensorflow,gautam1858/tensorflow,with-git/tensorflow,ran5515/DeepDecision,Xeralux/tensorflow,suiyuan2009/tensorflow,jhaux/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,manipopopo/tensorflow,LUTAN/tensorflow,DavidNorman/tensorflow,seaotterman/tensorflow,jeffzheng1/tensorflow,freedomtan/tensorflow,lukeiwanski/tensorflow-opencl,kchodorow/tensorflow,renyi533/tensorflow,jwlawson/tensorflow,alivecor/tensorflow,mortada/tensorflow,hehongliang/tensorflow,ppries/tensorflow,kevin-coder/tensorflow-fork,jhseu/tensorflow,xzturn/tensorflow,horance-liu/tensorflow,lukeiwanski/tensorflow,eadgarchen/tensorflow,dyoung418/tensorflow,jalexvig/tensorflow,admcrae/tensorflow,davidzchen/tensorflow,dancingdan/tensorflow,kevin-coder/tensorflow-fork,tillahoffmann/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,arborh/tensorflow,ibab/tensorflow,apark263/tensorflow,gunan/tensorflow,manjunaths/tensorflow,xodus7/tensorflow,hfp/tensorflow-xsmm,cg31/tensorflow,girving/tensorflow,calebfoss/tensorflow,jhseu/tensorflow,kchodorow/tensorflow,johndpope/tensorflow,gojira/tensorflow,ibmsoe/tensorflow,sjperkins/tensorflow,gibiansky/tensorflow,wangyum/tensorflow,drpngx/tensorflow,annarev/tensorflow,av8ramit/tensorflow,yongtang/tensorflow,Bulochkin/tensorflow_pack,horance-liu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,laszlocsomor/tensorflow,brchiu/tensorflow,seaotterman/tensorflow,tongwang01/tensorflow,cancan101/tensorflow,alistairlow/tensorflow,benoitsteiner/tensorflow-opencl,elingg/tensorflow,martinwicke/tensorflow,jhseu/tensorflow,cancan101/tensorflow,MycChiu/tensorflow,petewarden/tensorflow,dongjoon-hyun/tensorflow,EvenStrangest/tensorflow,AnishShah/tensorflow,seanli9jan/tensorflow,kchodorow/tensorflow,karllessard/tensorflow,av8ramit/tensorflow,lukeiwanski/tensorflow,ninotoshi/tensorflow,ghchinoy/tensorflow,benoitsteiner/tensorflow,dendisuhubdy/tensorflow,jhseu/tensorflow,dongjoon-hyun/tensorflow,apark263/tensorflow,chemelnucfin/tensorflow,ychfan/tensorflow,strint/tensorflow,XueqingLin/tensorflow,renyi533/tensorflow,ivano666/tensorflow,DavidNorman/tensorflow,AndreasMadsen/tensorflow,sjperkins/tensorflow,chemelnucfin/tensorflow,kevin-coder/tensorflow-fork,seanli9jan/tensorflow,thesuperzapper/tensorflow,scenarios/tensorflow,ageron/tensorflow,ppwwyyxx/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,tornadozou/tensorflow,aam-at/tensorflow,ArtsiomCh/tensorflow,allenlavoie/tensorflow,thesuperzapper/tensorflow,DCSaunders/tensorflow,wangyum/tensorflow,yufengg/tensorflow,ArtsiomCh/tensorflow,horance-liu/tensorflow,DCSaunders/tensorflow,gnieboer/tensorflow,juharris/tensorflow,nikste/tensorflow,a-doumoulakis/tensorflow,JVillella/tensorflow,yaroslavvb/tensorflow,HKUST-SING/tensorflow,HKUST-SING/tensorflow,karllessard/tensorflow,aldian
/tensorflow,adit-chandra/tensorflow,allenlavoie/tensorflow,brchiu/tensorflow,petewarden/tensorflow_makefile,eaplatanios/tensorflow,laosiaudi/tensorflow,ibab/tensorflow,handroissuazo/tensorflow,laszlocsomor/tensorflow,maciekcc/tensorflow,elingg/tensorflow,sandeepdsouza93/TensorFlow-15712,andrewcmyers/tensorflow,admcrae/tensorflow,ghchinoy/tensorflow,elingg/tensorflow,aam-at/tensorflow,code-sauce/tensorflow,chemelnucfin/tensorflow,dancingdan/tensorflow,MycChiu/tensorflow,Kongsea/tensorflow,calebfoss/tensorflow,neilhan/tensorflow,dyoung418/tensorflow,abhitopia/tensorflow,Moriadry/tensorflow,sarvex/tensorflow,yaroslavvb/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,llhe/tensorflow,arborh/tensorflow,adamtiger/tensorflow,brchiu/tensorflow,yongtang/tensorflow,nightjean/Deep-Learning,MoamerEncsConcordiaCa/tensorflow,tensorflow/tensorflow-pywrap_saved_model,girving/tensorflow,sjperkins/tensorflow,unsiloai/syntaxnet-ops-hack,JingJunYin/tensorflow,theflofly/tensorflow,yongtang/tensorflow,xzturn/tensorflow,bowang/tensorflow,HKUST-SING/tensorflow,JVillella/tensorflow,DCSaunders/tensorflow,mixturemodel-flow/tensorflow,ivano666/tensorflow,ran5515/DeepDecision,yanchen036/tensorflow,ibmsoe/tensorflow,anilmuthineni/tensorflow,alshedivat/tensorflow,nolanliou/tensorflow,paolodedios/tensorflow,dendisuhubdy/tensorflow,aselle/tensorflow,jostep/tensorflow,av8ramit/tensorflow,odejesush/tensorflow,kamcpp/tensorflow,odejesush/tensorflow,seanli9jan/tensorflow,meteorcloudy/tensorflow,drpngx/tensorflow,jendap/tensorflow,eaplatanios/tensorflow,calebfoss/tensorflow,jbedorf/tensorflow,JingJunYin/tensorflow,ppries/tensorflow,maciekcc/tensorflow,dendisuhubdy/tensorflow,ran5515/DeepDecision,tongwang01/tensorflow,scenarios/tensorflow,petewarden/tensorflow,mavenlin/tensorflow,chenjun0210/tensorflow,petewarden/tensorflow_makefile,DavidNorman/tensorflow,lukeiwanski/tensorflow-opencl,hsaputra/tensorflow,haeusser/tensorflow,eerwitt/tensorflow,ghchinoy/tensorflow,laosiaudi/tensorflow,Mistobaan/tensorflow,aselle/tensorflow,admcrae/tensorflow,paolodedios/tensorflow,kevin-coder/tensorflow-fork,naturali/tensorflow,gunan/tensorflow,llhe/tensorflow,Xeralux/tensorflow,DavidNorman/tensorflow,yanchen036/tensorflow,LUTAN/tensorflow,seanli9jan/tensorflow,jostep/tensorflow,brchiu/tensorflow,kevin-coder/tensorflow-fork,scenarios/tensorflow,davidzchen/tensorflow,eadgarchen/tensorflow,SnakeJenny/TensorFlow,gunan/tensorflow,pierreg/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,benoitsteiner/tensorflow,unsiloai/syntaxnet-ops-hack,ghchinoy/tensorflow,DavidNorman/tensorflow,alisidd/tensorflow,pierreg/tensorflow,freedomtan/tensorflow,mixturemodel-flow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gibiansky/tensorflow,gnieboer/tensorflow,theflofly/tensorflow,aselle/tensorflow,hehongliang/tensorflow,jendap/tensorflow,sandeepdsouza93/TensorFlow-15712,frreiss/tensorflow-fred,lakshayg/tensorflow,jhaux/tensorflow,handroissuazo/tensorflow,wangyum/tensorflow,ninotoshi/tensorflow,freedomtan/tensorflow,ville-k/tensorflow,nolanliou/tensorflow,jbedorf/tensorflow,alsrgv/tensorflow,lukeiwanski/tensorflow,yufengg/tensorflow,tensorflow/tensorflow-pywrap_saved_model,elingg/tensorflow,jalexvig/tensorflow,ravindrapanda/tensorflow,snnn/tensorflow,nightjean/Deep-Learning,tomasreimers/tensorflow-emscripten,manjunaths/tensorflow,alsrgv/tensorflow,horance-liu/tensorflow,asimshankar/tensorflow,cancan101/tensorflow,ninotoshi/tensorflow,Moriadry/tensorflow,thesuperzapper/tensorflow,chris-chris/tensorflow,dongjoon-hyun/tensorflow,manjunaths
/tensorflow,cxxgtxy/tensorflow,elingg/tensorflow,tongwang01/tensorflow,Bulochkin/tensorflow_pack,taknevski/tensorflow-xsmm,hfp/tensorflow-xsmm,lukeiwanski/tensorflow-opencl,zycdragonball/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,horance-liu/tensorflow,hfp/tensorflow-xsmm,TakayukiSakai/tensorflow,ravindrapanda/tensorflow,gautam1858/tensorflow,pcm17/tensorflow,DCSaunders/tensorflow,MycChiu/tensorflow,zycdragonball/tensorflow,HaebinShin/tensorflow,kevin-coder/tensorflow-fork,xodus7/tensorflow,sjperkins/tensorflow,arborh/tensorflow,kobejean/tensorflow,tiagofrepereira2012/tensorflow,av8ramit/tensorflow,whn09/tensorflow,tensorflow/tensorflow,calebfoss/tensorflow,alsrgv/tensorflow,Bulochkin/tensorflow_pack,laszlocsomor/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,neilhan/tensorflow,memo/tensorflow,nikste/tensorflow,chris-chris/tensorflow,annarev/tensorflow,av8ramit/tensorflow,naturali/tensorflow,a-doumoulakis/tensorflow,ivano666/tensorflow,girving/tensorflow,dendisuhubdy/tensorflow,guschmue/tensorflow,dongjoon-hyun/tensorflow,Xeralux/tensorflow,petewarden/tensorflow_makefile,suiyuan2009/tensorflow,ageron/tensorflow,zasdfgbnm/tensorflow,gnieboer/tensorflow,nburn42/tensorflow,AndreasMadsen/tensorflow,eadgarchen/tensorflow,kevin-coder/tensorflow-fork,alivecor/tensorflow,eadgarchen/tensorflow,vrv/tensorflow,Mistobaan/tensorflow,nanditav/15712-TensorFlow,anand-c-goog/tensorflow,benoitsteiner/tensorflow-opencl,zycdragonball/tensorflow,benoitsteiner/tensorflow-opencl,Bulochkin/tensorflow_pack,anand-c-goog/tensorflow,lakshayg/tensorflow,adamtiger/tensorflow,Moriadry/tensorflow,yanchen036/tensorflow,ibmsoe/tensorflow,gautam1858/tensorflow,jendap/tensorflow,with-git/tensorflow,ZhangXinNan/tensorflow,gnieboer/tensorflow,cxxgtxy/tensorflow,thjashin/tensorflow,MycChiu/tensorflow,neilhan/tensorflow,nolanliou/tensorflow,horance-liu/tensorflow,benoitsteiner/tensorflow-xsmm,seaotterman/tensorflow,pavelchristof/gomoku-ai,ZhangXinNan/tensorflow,aselle/tensorflow,jhaux/tensorflow,Mistobaan/tensorflow,kobejean/tensorflow,ychfan/tensorflow,brchiu/tensorflow,taknevski/tensorflow-xsmm,abhitopia/tensorflow,juharris/tensorflow,seanli9jan/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,nikste/tensorflow,theflofly/tensorflow,gojira/tensorflow,hsaputra/tensorflow,mdrumond/tensorflow,lakshayg/tensorflow,suiyuan2009/tensorflow,jeffzheng1/tensorflow,adamtiger/tensorflow,haeusser/tensorflow,markslwong/tensorflow,meteorcloudy/tensorflow,scenarios/tensorflow,haeusser/tensorflow,manazhao/tf_recsys,mavenlin/tensorflow,kobejean/tensorflow,yufengg/tensorflow,gunan/tensorflow
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
import os
from IPython.lib import passwd
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
Move imports to beginning of code
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
|
<commit_before># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
import os
from IPython.lib import passwd
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
<commit_msg>Move imports to beginning of code<commit_after>
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
import os
from IPython.lib import passwd
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
Move imports to beginning of code# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
|
<commit_before># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
import os
from IPython.lib import passwd
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
<commit_msg>Move imports to beginning of code<commit_after># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
|
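A minimal sketch, not part of the record above, of why the pre-fix config fails: the guard reads os.environ before import os has executed, so loading the profile raises NameError. Hoisting the imports to the top, as the commit does, removes the failure.

# Reproduces the pre-fix failure mode in isolation (plain Python, runnable).
try:
    if 'PASSWORD' in os.environ:  # NameError: `os` is not imported yet
        import os
except NameError as exc:
    print(exc)  # -> name 'os' is not defined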
dd51e13a2a7b4e4005127ca0e409d0882179b39f
|
bluebottle/mail/__init__.py
|
bluebottle/mail/__init__.py
|
from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
def send_mail(template_name, subject, to, **kwargs):
if hasattr(to, 'primary_language') and to.primary_language:
translation.activate(to.primary_language)
kwargs.update({
'receiver': to,
'site': 'https://{0}'.format(Site.objects.get_current().domain)
})
context = ClientContext(kwargs)
subject = unicode(subject) # Unlazy the translatable string subject within activated language.
text_content = get_template('{0}.txt'.format(template_name)).render(context)
html_content = get_template('{0}.html'.format(template_name)).render(context)
if hasattr(to, 'primary_language') and to.primary_language:
translation.deactivate()
msg = EmailMultiAlternatives(subject=subject, body=text_content, to=[to.email])
msg.attach_alternative(html_content, "text/html")
return msg.send()
|
from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
from bluebottle.clients import properties
def send_mail(template_name, subject, to, **kwargs):
if hasattr(to, 'primary_language') and to.primary_language:
translation.activate(to.primary_language)
kwargs.update({
'receiver': to,
'site': 'https://{0}'.format(Site.objects.get_current().domain)
})
context = ClientContext(kwargs)
subject = unicode(subject) # Unlazy the translatable string subject within activated language.
text_content = get_template('{0}.txt'.format(template_name)).render(context)
html_content = get_template('{0}.html'.format(template_name)).render(context)
if hasattr(to, 'primary_language') and to.primary_language:
translation.deactivate()
from_email = properties.CONTACT_EMAIL
msg = EmailMultiAlternatives(subject=subject, from_email=from_email, body=text_content, to=[to.email])
msg.attach_alternative(html_content, "text/html")
return msg.send()
|
Use CONTACT_EMAIL as default from address
|
Use CONTACT_EMAIL as default from address
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle
|
from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
def send_mail(template_name, subject, to, **kwargs):
if hasattr(to, 'primary_language') and to.primary_language:
translation.activate(to.primary_language)
kwargs.update({
'receiver': to,
'site': 'https://{0}'.format(Site.objects.get_current().domain)
})
context = ClientContext(kwargs)
subject = unicode(subject) # Unlazy the translatable string subject within activated language.
text_content = get_template('{0}.txt'.format(template_name)).render(context)
html_content = get_template('{0}.html'.format(template_name)).render(context)
if hasattr(to, 'primary_language') and to.primary_language:
translation.deactivate()
msg = EmailMultiAlternatives(subject=subject, body=text_content, to=[to.email])
msg.attach_alternative(html_content, "text/html")
return msg.send()
Use CONTACT_EMAIL as default from address
|
from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
from bluebottle.clients import properties
def send_mail(template_name, subject, to, **kwargs):
if hasattr(to, 'primary_language') and to.primary_language:
translation.activate(to.primary_language)
kwargs.update({
'receiver': to,
'site': 'https://{0}'.format(Site.objects.get_current().domain)
})
context = ClientContext(kwargs)
subject = unicode(subject) # Unlazy the translatable string subject within activated language.
text_content = get_template('{0}.txt'.format(template_name)).render(context)
html_content = get_template('{0}.html'.format(template_name)).render(context)
if hasattr(to, 'primary_language') and to.primary_language:
translation.deactivate()
from_email = properties.CONTACT_EMAIL
msg = EmailMultiAlternatives(subject=subject, from_email=from_email, body=text_content, to=[to.email])
msg.attach_alternative(html_content, "text/html")
return msg.send()
|
<commit_before>from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
def send_mail(template_name, subject, to, **kwargs):
if hasattr(to, 'primary_language') and to.primary_language:
translation.activate(to.primary_language)
kwargs.update({
'receiver': to,
'site': 'https://{0}'.format(Site.objects.get_current().domain)
})
context = ClientContext(kwargs)
subject = unicode(subject) # Unlazy the translatable string subject within activated language.
text_content = get_template('{0}.txt'.format(template_name)).render(context)
html_content = get_template('{0}.html'.format(template_name)).render(context)
if hasattr(to, 'primary_language') and to.primary_language:
translation.deactivate()
msg = EmailMultiAlternatives(subject=subject, body=text_content, to=[to.email])
msg.attach_alternative(html_content, "text/html")
return msg.send()
<commit_msg>Use CONTACT_EMAIL as default from address<commit_after>
|
from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
from bluebottle.clients import properties
def send_mail(template_name, subject, to, **kwargs):
if hasattr(to, 'primary_language') and to.primary_language:
translation.activate(to.primary_language)
kwargs.update({
'receiver': to,
'site': 'https://{0}'.format(Site.objects.get_current().domain)
})
context = ClientContext(kwargs)
subject = unicode(subject) # Unlazy the translatable string subject within activated language.
text_content = get_template('{0}.txt'.format(template_name)).render(context)
html_content = get_template('{0}.html'.format(template_name)).render(context)
if hasattr(to, 'primary_language') and to.primary_language:
translation.deactivate()
from_email = properties.CONTACT_EMAIL
msg = EmailMultiAlternatives(subject=subject, from_email=from_email, body=text_content, to=[to.email])
msg.attach_alternative(html_content, "text/html")
return msg.send()
|
from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
def send_mail(template_name, subject, to, **kwargs):
if hasattr(to, 'primary_language') and to.primary_language:
translation.activate(to.primary_language)
kwargs.update({
'receiver': to,
'site': 'https://{0}'.format(Site.objects.get_current().domain)
})
context = ClientContext(kwargs)
subject = unicode(subject) # Unlazy the translatable string subject within activated language.
text_content = get_template('{0}.txt'.format(template_name)).render(context)
html_content = get_template('{0}.html'.format(template_name)).render(context)
if hasattr(to, 'primary_language') and to.primary_language:
translation.deactivate()
msg = EmailMultiAlternatives(subject=subject, body=text_content, to=[to.email])
msg.attach_alternative(html_content, "text/html")
return msg.send()
Use CONTACT_EMAIL as default from addressfrom django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
from bluebottle.clients import properties
def send_mail(template_name, subject, to, **kwargs):
if hasattr(to, 'primary_language') and to.primary_language:
translation.activate(to.primary_language)
kwargs.update({
'receiver': to,
'site': 'https://{0}'.format(Site.objects.get_current().domain)
})
context = ClientContext(kwargs)
subject = unicode(subject) # Unlazy the translatable string subject within activated language.
text_content = get_template('{0}.txt'.format(template_name)).render(context)
html_content = get_template('{0}.html'.format(template_name)).render(context)
if hasattr(to, 'primary_language') and to.primary_language:
translation.deactivate()
from_email = properties.CONTACT_EMAIL
msg = EmailMultiAlternatives(subject=subject, from_email=from_email, body=text_content, to=[to.email])
msg.attach_alternative(html_content, "text/html")
return msg.send()
|
<commit_before>from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
def send_mail(template_name, subject, to, **kwargs):
if hasattr(to, 'primary_language') and to.primary_language:
translation.activate(to.primary_language)
kwargs.update({
'receiver': to,
'site': 'https://{0}'.format(Site.objects.get_current().domain)
})
context = ClientContext(kwargs)
subject = unicode(subject) # Unlazy the translatable string subject within activated language.
text_content = get_template('{0}.txt'.format(template_name)).render(context)
html_content = get_template('{0}.html'.format(template_name)).render(context)
if hasattr(to, 'primary_language') and to.primary_language:
translation.deactivate()
msg = EmailMultiAlternatives(subject=subject, body=text_content, to=[to.email])
msg.attach_alternative(html_content, "text/html")
return msg.send()
<commit_msg>Use CONTACT_EMAIL as default from address<commit_after>from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
from bluebottle.clients import properties
def send_mail(template_name, subject, to, **kwargs):
if hasattr(to, 'primary_language') and to.primary_language:
translation.activate(to.primary_language)
kwargs.update({
'receiver': to,
'site': 'https://{0}'.format(Site.objects.get_current().domain)
})
context = ClientContext(kwargs)
subject = unicode(subject) # Unlazy the translatable string subject within activated language.
text_content = get_template('{0}.txt'.format(template_name)).render(context)
html_content = get_template('{0}.html'.format(template_name)).render(context)
if hasattr(to, 'primary_language') and to.primary_language:
translation.deactivate()
from_email = properties.CONTACT_EMAIL
msg = EmailMultiAlternatives(subject=subject, from_email=from_email, body=text_content, to=[to.email])
msg.attach_alternative(html_content, "text/html")
return msg.send()
|
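For context, a hedged sketch of the from_email behaviour using Django's stock EmailMultiAlternatives; the subclass in bluebottle.clients.mail is assumed to keep the same signature. When from_email is omitted, Django falls back to settings.DEFAULT_FROM_EMAIL, so passing CONTACT_EMAIL explicitly lets each tenant control the sender. Addresses below are illustrative placeholders, not values from the record.

from django.core.mail import EmailMultiAlternatives

msg = EmailMultiAlternatives(
    subject="Welcome",
    body="plain-text body",
    from_email="support@example.com",  # e.g. properties.CONTACT_EMAIL
    to=["receiver@example.com"],
)
msg.attach_alternative("<p>HTML body</p>", "text/html")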
606e9731bdcaf61f1358a5fc5341c85c83d18370
|
IPython/config/profile/sympy/ipython_config.py
|
IPython/config/profile/sympy/ipython_config.py
|
c = get_config()
app = c.InteractiveShellApp
# This can be used at any point in a config file to load a sub config
# and merge it into the current one.
load_subconfig('ipython_config.py', profile='default')
lines = """
from __future__ import division
from sympy import *
x, y, z = symbols('xyz')
k, m, n = symbols('kmn', integer=True)
f, g, h = map(Function, 'fgh')
"""
# You have to make sure that attributes that are containers already
# exist before using them. Simply assigning a new list will override
# all previous values.
if hasattr(app, 'exec_lines'):
app.exec_lines.append(lines)
else:
app.exec_lines = [lines]
# Load the sympy_printing extension to enable nice printing of sympy expr's.
if hasattr(app, 'extensions'):
app.extensions.append('sympyprinting')
else:
app.extensions = ['sympyprinting']
|
c = get_config()
app = c.InteractiveShellApp
# This can be used at any point in a config file to load a sub config
# and merge it into the current one.
load_subconfig('ipython_config.py', profile='default')
lines = """
from __future__ import division
from sympy import *
x, y, z = symbols('x,y,z')
k, m, n = symbols('k,m,n', integer=True)
f, g, h = map(Function, 'fgh')
"""
# You have to make sure that attributes that are containers already
# exist before using them. Simply assigning a new list will override
# all previous values.
if hasattr(app, 'exec_lines'):
app.exec_lines.append(lines)
else:
app.exec_lines = [lines]
# Load the sympy_printing extension to enable nice printing of sympy expr's.
if hasattr(app, 'extensions'):
app.extensions.append('sympyprinting')
else:
app.extensions = ['sympyprinting']
|
Fix sympy profile to work with sympy 0.7.
|
Fix sympy profile to work with sympy 0.7.
Sympy 0.7 no longer supports x,y,z = symbols('xyz').
symbols('xyz') is now a single symbol 'xyz'.
Change the sympy profile to symbols('x,y,z').
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
c = get_config()
app = c.InteractiveShellApp
# This can be used at any point in a config file to load a sub config
# and merge it into the current one.
load_subconfig('ipython_config.py', profile='default')
lines = """
from __future__ import division
from sympy import *
x, y, z = symbols('xyz')
k, m, n = symbols('kmn', integer=True)
f, g, h = map(Function, 'fgh')
"""
# You have to make sure that attributes that are containers already
# exist before using them. Simply assigning a new list will override
# all previous values.
if hasattr(app, 'exec_lines'):
app.exec_lines.append(lines)
else:
app.exec_lines = [lines]
# Load the sympy_printing extension to enable nice printing of sympy expr's.
if hasattr(app, 'extensions'):
app.extensions.append('sympyprinting')
else:
app.extensions = ['sympyprinting']
Fix sympy profile to work with sympy 0.7.
Sympy 0.7 no longer supports x,y,z = symbols('xyz').
symbols('xyz') is now a single symbol 'xyz'.
Change the sympy profile to symbols('x,y,z').
|
c = get_config()
app = c.InteractiveShellApp
# This can be used at any point in a config file to load a sub config
# and merge it into the current one.
load_subconfig('ipython_config.py', profile='default')
lines = """
from __future__ import division
from sympy import *
x, y, z = symbols('x,y,z')
k, m, n = symbols('k,m,n', integer=True)
f, g, h = map(Function, 'fgh')
"""
# You have to make sure that attributes that are containers already
# exist before using them. Simply assigning a new list will override
# all previous values.
if hasattr(app, 'exec_lines'):
app.exec_lines.append(lines)
else:
app.exec_lines = [lines]
# Load the sympy_printing extension to enable nice printing of sympy expr's.
if hasattr(app, 'extensions'):
app.extensions.append('sympyprinting')
else:
app.extensions = ['sympyprinting']
|
<commit_before>c = get_config()
app = c.InteractiveShellApp
# This can be used at any point in a config file to load a sub config
# and merge it into the current one.
load_subconfig('ipython_config.py', profile='default')
lines = """
from __future__ import division
from sympy import *
x, y, z = symbols('xyz')
k, m, n = symbols('kmn', integer=True)
f, g, h = map(Function, 'fgh')
"""
# You have to make sure that attributes that are containers already
# exist before using them. Simply assigning a new list will override
# all previous values.
if hasattr(app, 'exec_lines'):
app.exec_lines.append(lines)
else:
app.exec_lines = [lines]
# Load the sympy_printing extension to enable nice printing of sympy expr's.
if hasattr(app, 'extensions'):
app.extensions.append('sympyprinting')
else:
app.extensions = ['sympyprinting']
<commit_msg>Fix sympy profile to work with sympy 0.7.
Sympy 0.7 no longer supports x,y,z = symbols('xyz').
symbols('xyz') is now a single symbol 'xyz'.
Change the sympy profile to symbols('x,y,z').<commit_after>
|
c = get_config()
app = c.InteractiveShellApp
# This can be used at any point in a config file to load a sub config
# and merge it into the current one.
load_subconfig('ipython_config.py', profile='default')
lines = """
from __future__ import division
from sympy import *
x, y, z = symbols('x,y,z')
k, m, n = symbols('k,m,n', integer=True)
f, g, h = map(Function, 'fgh')
"""
# You have to make sure that attributes that are containers already
# exist before using them. Simply assigning a new list will override
# all previous values.
if hasattr(app, 'exec_lines'):
app.exec_lines.append(lines)
else:
app.exec_lines = [lines]
# Load the sympy_printing extension to enable nice printing of sympy expr's.
if hasattr(app, 'extensions'):
app.extensions.append('sympyprinting')
else:
app.extensions = ['sympyprinting']
|
c = get_config()
app = c.InteractiveShellApp
# This can be used at any point in a config file to load a sub config
# and merge it into the current one.
load_subconfig('ipython_config.py', profile='default')
lines = """
from __future__ import division
from sympy import *
x, y, z = symbols('xyz')
k, m, n = symbols('kmn', integer=True)
f, g, h = map(Function, 'fgh')
"""
# You have to make sure that attributes that are containers already
# exist before using them. Simply assigning a new list will override
# all previous values.
if hasattr(app, 'exec_lines'):
app.exec_lines.append(lines)
else:
app.exec_lines = [lines]
# Load the sympy_printing extension to enable nice printing of sympy expr's.
if hasattr(app, 'extensions'):
app.extensions.append('sympyprinting')
else:
app.extensions = ['sympyprinting']
Fix sympy profile to work with sympy 0.7.
Sympy 0.7 no longer supports x,y,z = symbols('xyz').
symbols('xyz') is now a single symbol 'xyz'.
Change the sympy profile to symbols('x,y,z').c = get_config()
app = c.InteractiveShellApp
# This can be used at any point in a config file to load a sub config
# and merge it into the current one.
load_subconfig('ipython_config.py', profile='default')
lines = """
from __future__ import division
from sympy import *
x, y, z = symbols('x,y,z')
k, m, n = symbols('k,m,n', integer=True)
f, g, h = map(Function, 'fgh')
"""
# You have to make sure that attributes that are containers already
# exist before using them. Simply assigning a new list will override
# all previous values.
if hasattr(app, 'exec_lines'):
app.exec_lines.append(lines)
else:
app.exec_lines = [lines]
# Load the sympy_printing extension to enable nice printing of sympy expr's.
if hasattr(app, 'extensions'):
app.extensions.append('sympyprinting')
else:
app.extensions = ['sympyprinting']
|
<commit_before>c = get_config()
app = c.InteractiveShellApp
# This can be used at any point in a config file to load a sub config
# and merge it into the current one.
load_subconfig('ipython_config.py', profile='default')
lines = """
from __future__ import division
from sympy import *
x, y, z = symbols('xyz')
k, m, n = symbols('kmn', integer=True)
f, g, h = map(Function, 'fgh')
"""
# You have to make sure that attributes that are containers already
# exist before using them. Simply assigning a new list will override
# all previous values.
if hasattr(app, 'exec_lines'):
app.exec_lines.append(lines)
else:
app.exec_lines = [lines]
# Load the sympy_printing extension to enable nice printing of sympy expr's.
if hasattr(app, 'extensions'):
app.extensions.append('sympyprinting')
else:
app.extensions = ['sympyprinting']
<commit_msg>Fix sympy profile to work with sympy 0.7.
Sympy 0.7 no longer supports x,y,z = symbols('xyz').
symbols('xyz') is now a single symbol 'xyz'.
Change the sympy profile to symbols('x,y,z').<commit_after>c = get_config()
app = c.InteractiveShellApp
# This can be used at any point in a config file to load a sub config
# and merge it into the current one.
load_subconfig('ipython_config.py', profile='default')
lines = """
from __future__ import division
from sympy import *
x, y, z = symbols('x,y,z')
k, m, n = symbols('k,m,n', integer=True)
f, g, h = map(Function, 'fgh')
"""
# You have to make sure that attributes that are containers already
# exist before using them. Simply assigning a new list will override
# all previous values.
if hasattr(app, 'exec_lines'):
app.exec_lines.append(lines)
else:
app.exec_lines = [lines]
# Load the sympy_printing extension to enable nice printing of sympy expr's.
if hasattr(app, 'extensions'):
app.extensions.append('sympyprinting')
else:
app.extensions = ['sympyprinting']
|
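The behavioural change behind this commit, shown in isolation (requires sympy >= 0.7; earlier releases split a bare 'xyz' into three symbols):

from sympy import symbols

xyz = symbols('xyz')        # since sympy 0.7: one Symbol named 'xyz'
x, y, z = symbols('x,y,z')  # three Symbols; 'x y z' works as well
print(xyz, x, y, z)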
ab0f6115c50bea63856c1e880249ad4bdca3ce42
|
src/web/urls.py
|
src/web/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls')),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls', namespace='ansible')),
]
|
Add ansible namespace in root URLconf
|
Add ansible namespace in root URLconf
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls')),
]
Add ansible namespace in root URLconf
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls', namespace='ansible')),
]
|
<commit_before>from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls')),
]
<commit_msg>Add ansible namespace in root URLconf<commit_after>
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls', namespace='ansible')),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls')),
]
Add ansible namespace in root URLconffrom django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls', namespace='ansible')),
]
|
<commit_before>from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls')),
]
<commit_msg>Add ansible namespace in root URLconf<commit_after>from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^login/', auth_views.login, name='login',
kwargs={'redirect_authenticated_user': True}),
url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
url(r'^admin/', admin.site.urls),
url(r'^playbooks/', include('ansible.urls', namespace='ansible')),
]
|
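A hedged sketch of what the namespace enables: URL names under the include can now be reversed with the 'ansible:' prefix. The name 'playbook_list' is a hypothetical example, not taken from the record, and reverse() only resolves inside a configured Django project.

from django.urls import reverse  # django.core.urlresolvers.reverse before Django 1.10

url = reverse('ansible:playbook_list')  # hypothetical namespaced URL name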
5bf24464b00257a9fa5f66047a2f7815c1e4f8fb
|
tweepy/utils.py
|
tweepy/utils.py
|
# Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%f%z"
)
|
# Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%fZ"
).replace(tzinfo=datetime.timezone.utc)
# Use %z when support for Python 3.6 is dropped
|
Fix parse_datetime to parse API datetime string format with Python 3.6
|
Fix parse_datetime to parse API datetime string format with Python 3.6
The '%z' directive didn't accept 'Z' until Python 3.7
|
Python
|
mit
|
svven/tweepy,tweepy/tweepy
|
# Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%f%z"
)
Fix parse_datetime to parse API datetime string format with Python 3.6
The '%z' directive didn't accept 'Z' until Python 3.7
|
# Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%fZ"
).replace(tzinfo=datetime.timezone.utc)
# Use %z when support for Python 3.6 is dropped
|
<commit_before># Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%f%z"
)
<commit_msg>Fix parse_datetime to parse API datetime string format with Python 3.6
The '%z' directive didn't accept 'Z' until Python 3.7<commit_after>
|
# Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%fZ"
).replace(tzinfo=datetime.timezone.utc)
# Use %z when support for Python 3.6 is dropped
|
# Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%f%z"
)
Fix parse_datetime to parse API datetime string format with Python 3.6
The '%z' directive didn't accept 'Z' until Python 3.7# Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%fZ"
).replace(tzinfo=datetime.timezone.utc)
# Use %z when support for Python 3.6 is dropped
|
<commit_before># Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%f%z"
)
<commit_msg>Fix parse_datetime to parse API datetime string format with Python 3.6
The '%z' directive didn't accept 'Z' until Python 3.7<commit_after># Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%fZ"
).replace(tzinfo=datetime.timezone.utc)
# Use %z when support for Python 3.6 is dropped
|
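A quick runnable check of the incompatibility described in the message: on Python 3.6, strptime's %z does not accept a literal 'Z' suffix, so the fixed version matches 'Z' verbatim and attaches UTC explicitly, yielding an aware datetime on every supported version.

import datetime

s = "2021-03-01T12:34:56.789000Z"  # illustrative API-style timestamp
dt = datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%fZ").replace(
    tzinfo=datetime.timezone.utc
)
print(dt.isoformat())  # -> 2021-03-01T12:34:56.789000+00:00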
a0740ec8373a3a178e3e83b4ec2768621c697181
|
versions/rattoolsversions.py
|
versions/rattoolsversions.py
|
#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools42(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools42, self).__init__("rattools-4.2", system, "root-5.34.02", "rat-4.2",
"release-4.20")
class RatTools41(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools41, self).__init__("rattools-4.1", system, "root-5.34.02", "rat-4.1",
"release-4.10")
class RatTools4(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools4, self).__init__("rattools-4", system, "root-5.32.04", "rat-4",
"release-4.00")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
Add fixed release rat-tools versions 4, 4.1, 4.2
|
Add fixed release rat-tools versions 4, 4.1, 4.2
|
Python
|
mit
|
mjmottram/snoing,mjmottram/snoing
|
#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
Add fixed release rat-tools versions 4, 4.1, 4.2
|
#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools42(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools42, self).__init__("rattools-4.2", system, "root-5.34.02", "rat-4.2",
"release-4.20")
class RatTools41(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools41, self).__init__("rattools-4.1", system, "root-5.34.02", "rat-4.1",
"release-4.10")
class RatTools4(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools4, self).__init__("rattools-4", system, "root-5.32.04", "rat-4",
"release-4.00")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
<commit_before>#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
<commit_msg>Add fixed release rat-tools versions 4, 4.1, 4.2<commit_after>
|
#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools42(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools42, self).__init__("rattools-4.2", system, "root-5.34.02", "rat-4.2",
"release-4.20")
class RatTools41(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools41, self).__init__("rattools-4.1", system, "root-5.34.02", "rat-4.1",
"release-4.10")
class RatTools4(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools4, self).__init__("rattools-4", system, "root-5.32.04", "rat-4",
"release-4.00")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
Add fixed release rat-tools versions 4, 4.1, 4.2#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools42(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools42, self).__init__("rattools-4.2", system, "root-5.34.02", "rat-4.2",
"release-4.20")
class RatTools41(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools41, self).__init__("rattools-4.1", system, "root-5.34.02", "rat-4.1",
"release-4.10")
class RatTools4(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools4, self).__init__("rattools-4", system, "root-5.32.04", "rat-4",
"release-4.00")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
<commit_before>#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
<commit_msg>Add fixed release rat-tools versions 4, 4.1, 4.2<commit_after>#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
def __init__(self, system):
""" Initialise dev version."""
super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools42(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools42, self).__init__("rattools-4.2", system, "root-5.34.02", "rat-4.2",
"release-4.20")
class RatTools41(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools41, self).__init__("rattools-4.1", system, "root-5.34.02", "rat-4.1",
"release-4.10")
class RatTools4(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools4, self).__init__("rattools-4", system, "root-5.32.04", "rat-4",
"release-4.00")
class RatTools1(rattools.RatToolsRelease):
def __init__(self, system):
""" Initialise an arbitrary snaphot version."""
super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
"ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
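A note on the pattern above: each fixed-release class only pins a ROOT version, a RAT dependency and a git tag, and forwards them to the rattools.RatToolsRelease base class, which is not part of this record. A minimal stand-in sketch of that base class (names and signature are assumptions, not the real snoing API) makes the pattern concrete:

class RatToolsRelease(object):
    # Hypothetical stand-in for rattools.RatToolsRelease; the real class
    # also knows how to download, build and install the tagged checkout.
    def __init__(self, name, system, root_dep, rat_dep, tag):
        self._name = name          # package name, e.g. "rattools-4.2"
        self._system = system      # build-system helper supplied by the installer
        self._root_dep = root_dep  # pinned ROOT dependency
        self._rat_dep = rat_dep    # pinned RAT dependency
        self._tag = tag            # git tag or commit hash to check out

    def describe(self):
        return "%s (ROOT %s, RAT %s, tag %s)" % (
            self._name, self._root_dep, self._rat_dep, self._tag)

# Each release subclass then only supplies its pinned versions:
print(RatToolsRelease("rattools-4.2", None, "root-5.34.02", "rat-4.2",
                      "release-4.20").describe())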
988f4aec1588f409f296e89acb47040cb2606cf8
|
ocradmin/plugins/numpy_nodes.py
|
ocradmin/plugins/numpy_nodes.py
|
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
"""
Rotate a Numpy image by num*90 degrees.
"""
arity = 1
stage = stages.FILTER_BINARY
name = "Numpy::Rotate90"
_parameters = [{
"name": "num",
"value": 1,
}]
def validate(self):
super(Rotate90Node, self).validate()
if not self._params.get("num"):
raise node.UnsetParameterError("num")
try:
num = int(self._params.get("num"))
except TypeError:
raise node.InvalidParameterError("'num' must be an integer")
def _eval(self):
image = self.get_input_data(0)
return numpy.rot90(image, int(self._params.get("num", 1)))
class Manager(manager.StandardManager):
"""
Handle Numpy nodes.
"""
@classmethod
def get_node(self, name, **kwargs):
if name.find("::") != -1:
name = name.split("::")[-1]
if name == "Rotate90":
return Rotate90Node(**kwargs)
@classmethod
def get_nodes(cls, *oftypes):
return super(Manager, cls).get_nodes(
*oftypes, globals=globals())
if __name__ == "__main__":
for n in Manager.get_nodes():
print n
|
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
"""
Rotate a Numpy image by num*90 degrees.
"""
arity = 1
stage = stages.FILTER_BINARY
name = "Numpy::Rotate90"
_parameters = [{
"name": "num",
"value": 1,
}]
def validate(self):
super(Rotate90Node, self).validate()
if not self._params.get("num"):
raise node.UnsetParameterError("num")
try:
num = int(self._params.get("num"))
except TypeError:
raise node.InvalidParameterError("'num' must be an integer")
def _eval(self):
image = self.get_input_data(0)
return numpy.rot90(image, int(self._params.get("num", 1)))
class Rotate90GrayNode(Rotate90Node):
"""
Grayscale version of above.
"""
stage = stages.FILTER_GRAY
name = "Numpy::Rotate90Gray"
class Manager(manager.StandardManager):
"""
Handle Numpy nodes.
"""
@classmethod
def get_node(self, name, **kwargs):
if name.find("::") != -1:
name = name.split("::")[-1]
if name == "Rotate90":
return Rotate90Node(**kwargs)
elif name == "Rotate90Gray":
return Rotate90GrayNode(**kwargs)
@classmethod
def get_nodes(cls, *oftypes):
return super(Manager, cls).get_nodes(
*oftypes, globals=globals())
if __name__ == "__main__":
for n in Manager.get_nodes():
print n
|
Add a grayscale rotation node (for testing)
|
Add a grayscale rotation node (for testing)
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
"""
Rotate a Numpy image by num*90 degrees.
"""
arity = 1
stage = stages.FILTER_BINARY
name = "Numpy::Rotate90"
_parameters = [{
"name": "num",
"value": 1,
}]
def validate(self):
super(Rotate90Node, self).validate()
if not self._params.get("num"):
raise node.UnsetParameterError("num")
try:
num = int(self._params.get("num"))
except TypeError:
raise node.InvalidParameterError("'num' must be an integer")
def _eval(self):
image = self.get_input_data(0)
return numpy.rot90(image, int(self._params.get("num", 1)))
class Manager(manager.StandardManager):
"""
Handle Numpy nodes.
"""
@classmethod
def get_node(self, name, **kwargs):
if name.find("::") != -1:
name = name.split("::")[-1]
if name == "Rotate90":
return Rotate90Node(**kwargs)
@classmethod
def get_nodes(cls, *oftypes):
return super(Manager, cls).get_nodes(
*oftypes, globals=globals())
if __name__ == "__main__":
for n in Manager.get_nodes():
print n
Add a grayscale rotation node (for testing)
|
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
"""
Rotate a Numpy image by num*90 degrees.
"""
arity = 1
stage = stages.FILTER_BINARY
name = "Numpy::Rotate90"
_parameters = [{
"name": "num",
"value": 1,
}]
def validate(self):
super(Rotate90Node, self).validate()
if not self._params.get("num"):
raise node.UnsetParameterError("num")
try:
num = int(self._params.get("num"))
except TypeError:
raise node.InvalidParameterError("'num' must be an integer")
def _eval(self):
image = self.get_input_data(0)
return numpy.rot90(image, int(self._params.get("num", 1)))
class Rotate90GrayNode(Rotate90Node):
"""
Grayscale version of above.
"""
stage = stages.FILTER_GRAY
name = "Numpy::Rotate90Gray"
class Manager(manager.StandardManager):
"""
Handle Numpy nodes.
"""
@classmethod
def get_node(self, name, **kwargs):
if name.find("::") != -1:
name = name.split("::")[-1]
if name == "Rotate90":
return Rotate90Node(**kwargs)
elif name == "Rotate90Gray":
return Rotate90GrayNode(**kwargs)
@classmethod
def get_nodes(cls, *oftypes):
return super(Manager, cls).get_nodes(
*oftypes, globals=globals())
if __name__ == "__main__":
for n in Manager.get_nodes():
print n
|
<commit_before>
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
"""
Rotate a Numpy image by num*90 degrees.
"""
arity = 1
stage = stages.FILTER_BINARY
name = "Numpy::Rotate90"
_parameters = [{
"name": "num",
"value": 1,
}]
def validate(self):
super(Rotate90Node, self).validate()
if not self._params.get("num"):
raise node.UnsetParameterError("num")
try:
num = int(self._params.get("num"))
except TypeError:
raise node.InvalidParameterError("'num' must be an integer")
def _eval(self):
image = self.get_input_data(0)
return numpy.rot90(image, int(self._params.get("num", 1)))
class Manager(manager.StandardManager):
"""
Handle Numpy nodes.
"""
@classmethod
def get_node(self, name, **kwargs):
if name.find("::") != -1:
name = name.split("::")[-1]
if name == "Rotate90":
return Rotate90Node(**kwargs)
@classmethod
def get_nodes(cls, *oftypes):
return super(Manager, cls).get_nodes(
*oftypes, globals=globals())
if __name__ == "__main__":
for n in Manager.get_nodes():
print n
<commit_msg>Add a grayscale rotation node (for testing)<commit_after>
|
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
"""
Rotate a Numpy image by num*90 degrees.
"""
arity = 1
stage = stages.FILTER_BINARY
name = "Numpy::Rotate90"
_parameters = [{
"name": "num",
"value": 1,
}]
def validate(self):
super(Rotate90Node, self).validate()
if not self._params.get("num"):
raise node.UnsetParameterError("num")
try:
num = int(self._params.get("num"))
except TypeError:
raise node.InvalidParameterError("'num' must be an integer")
def _eval(self):
image = self.get_input_data(0)
return numpy.rot90(image, int(self._params.get("num", 1)))
class Rotate90GrayNode(Rotate90Node):
"""
Grayscale version of above.
"""
stage = stages.FILTER_GRAY
name = "Numpy::Rotate90Gray"
class Manager(manager.StandardManager):
"""
Handle Numpy nodes.
"""
@classmethod
def get_node(self, name, **kwargs):
if name.find("::") != -1:
name = name.split("::")[-1]
if name == "Rotate90":
return Rotate90Node(**kwargs)
elif name == "Rotate90Gray":
return Rotate90GrayNode(**kwargs)
@classmethod
def get_nodes(cls, *oftypes):
return super(Manager, cls).get_nodes(
*oftypes, globals=globals())
if __name__ == "__main__":
for n in Manager.get_nodes():
print n
|
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
"""
Rotate a Numpy image by num*90 degrees.
"""
arity = 1
stage = stages.FILTER_BINARY
name = "Numpy::Rotate90"
_parameters = [{
"name": "num",
"value": 1,
}]
def validate(self):
super(Rotate90Node, self).validate()
if not self._params.get("num"):
raise node.UnsetParameterError("num")
try:
num = int(self._params.get("num"))
except TypeError:
raise node.InvalidParameterError("'num' must be an integer")
def _eval(self):
image = self.get_input_data(0)
return numpy.rot90(image, int(self._params.get("num", 1)))
class Manager(manager.StandardManager):
"""
Handle Numpy nodes.
"""
@classmethod
def get_node(self, name, **kwargs):
if name.find("::") != -1:
name = name.split("::")[-1]
if name == "Rotate90":
return Rotate90Node(**kwargs)
@classmethod
def get_nodes(cls, *oftypes):
return super(Manager, cls).get_nodes(
*oftypes, globals=globals())
if __name__ == "__main__":
for n in Manager.get_nodes():
print n
Add a grayscale rotation node (for testing)
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
"""
Rotate a Numpy image by num*90 degrees.
"""
arity = 1
stage = stages.FILTER_BINARY
name = "Numpy::Rotate90"
_parameters = [{
"name": "num",
"value": 1,
}]
def validate(self):
super(Rotate90Node, self).validate()
if not self._params.get("num"):
raise node.UnsetParameterError("num")
try:
num = int(self._params.get("num"))
except TypeError:
raise node.InvalidParameterError("'num' must be an integer")
def _eval(self):
image = self.get_input_data(0)
return numpy.rot90(image, int(self._params.get("num", 1)))
class Rotate90GrayNode(Rotate90Node):
"""
Grayscale version of above.
"""
stage = stages.FILTER_GRAY
name = "Numpy::Rotate90Gray"
class Manager(manager.StandardManager):
"""
Handle Numpy nodes.
"""
@classmethod
def get_node(self, name, **kwargs):
if name.find("::") != -1:
name = name.split("::")[-1]
if name == "Rotate90":
return Rotate90Node(**kwargs)
elif name == "Rotate90Gray":
return Rotate90GrayNode(**kwargs)
@classmethod
def get_nodes(cls, *oftypes):
return super(Manager, cls).get_nodes(
*oftypes, globals=globals())
if __name__ == "__main__":
for n in Manager.get_nodes():
print n
|
<commit_before>
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
"""
Rotate a Numpy image by num*90 degrees.
"""
arity = 1
stage = stages.FILTER_BINARY
name = "Numpy::Rotate90"
_parameters = [{
"name": "num",
"value": 1,
}]
def validate(self):
super(Rotate90Node, self).validate()
if not self._params.get("num"):
raise node.UnsetParameterError("num")
try:
num = int(self._params.get("num"))
except TypeError:
raise node.InvalidParameterError("'num' must be an integer")
def _eval(self):
image = self.get_input_data(0)
return numpy.rot90(image, int(self._params.get("num", 1)))
class Manager(manager.StandardManager):
"""
Handle Numpy nodes.
"""
@classmethod
def get_node(self, name, **kwargs):
if name.find("::") != -1:
name = name.split("::")[-1]
if name == "Rotate90":
return Rotate90Node(**kwargs)
@classmethod
def get_nodes(cls, *oftypes):
return super(Manager, cls).get_nodes(
*oftypes, globals=globals())
if __name__ == "__main__":
for n in Manager.get_nodes():
print n
<commit_msg>Add a grayscale rotation node (for testing)<commit_after>
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
"""
Rotate a Numpy image by num*90 degrees.
"""
arity = 1
stage = stages.FILTER_BINARY
name = "Numpy::Rotate90"
_parameters = [{
"name": "num",
"value": 1,
}]
def validate(self):
super(Rotate90Node, self).validate()
if not self._params.get("num"):
raise node.UnsetParameterError("num")
try:
num = int(self._params.get("num"))
except TypeError:
raise node.InvalidParameterError("'num' must be an integer")
def _eval(self):
image = self.get_input_data(0)
return numpy.rot90(image, int(self._params.get("num", 1)))
class Rotate90GrayNode(Rotate90Node):
"""
Grayscale version of above.
"""
stage = stages.FILTER_GRAY
name = "Numpy::Rotate90Gray"
class Manager(manager.StandardManager):
"""
Handle Numpy nodes.
"""
@classmethod
def get_node(self, name, **kwargs):
if name.find("::") != -1:
name = name.split("::")[-1]
if name == "Rotate90":
return Rotate90Node(**kwargs)
elif name == "Rotate90Gray":
return Rotate90GrayNode(**kwargs)
@classmethod
def get_nodes(cls, *oftypes):
return super(Manager, cls).get_nodes(
*oftypes, globals=globals())
if __name__ == "__main__":
for n in Manager.get_nodes():
print n
|
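The Rotate90 node above is a thin wrapper over numpy.rot90, where the num parameter counts quarter-turns counter-clockwise. A short, self-contained demonstration of that call (the array is just a toy stand-in for a page image):

import numpy

image = numpy.arange(6).reshape(2, 3)          # toy stand-in for an image
print(numpy.rot90(image, 1).shape)             # (3, 2): one quarter-turn
print((numpy.rot90(image, 4) == image).all())  # True: four turns is the identity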
dd237d82426ebbc3d2854641e8e73e2001857b67
|
damn/templatetags/damn.py
|
damn/templatetags/damn.py
|
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the tags
extra_tags = '\n'.join(context.render_context['AMN'].render(context))
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
|
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the tags
extra_tags = '\n'.join(context.render_context['AMN'].render(context))
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, filename=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and filename is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if filename is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(filename=filename, alias=alias, mode=mode, deps=args)
return ''
|
Rename 'name' argument to 'filename'
|
Rename 'name' argument to 'filename'
|
Python
|
bsd-2-clause
|
funkybob/django-amn
|
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the tags
extra_tags = '\n'.join(context.render_context['AMN'].render(context))
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
Rename 'name' argument to 'filename'
|
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the tags
extra_tags = '\n'.join(context.render_context['AMN'].render(context))
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, filename=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and filename is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if filename is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(filename=filename, alias=alias, mode=mode, deps=args)
return ''
|
<commit_before>
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the tags
extra_tags = '\n'.join(context.render_context['AMN'].render(context))
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
<commit_msg>Rename 'name' argument to 'filename'<commit_after>
|
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the tags
extra_tags = '\n'.join(context.render_context['AMN'].render(context))
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, filename=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and filename is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if filename is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(filename=filename, alias=alias, mode=mode, deps=args)
return ''
|
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the tags
extra_tags = '\n'.join(context.render_context['AMN'].render(context))
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
Rename 'name' argument to 'filename'
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the tags
extra_tags = '\n'.join(context.render_context['AMN'].render(context))
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, filename=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and filename is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if filename is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(filename=filename, alias=alias, mode=mode, deps=args)
return ''
|
<commit_before>
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the tags
extra_tags = '\n'.join(context.render_context['AMN'].render(context))
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and name is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if name is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
return ''
<commit_msg>Rename 'name' argument to 'filename'<commit_after>
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context):
context.render_context['AMN'] = AssetRegistry()
content = self.nodelist.render(context)
# Now output the tags
extra_tags = '\n'.join(context.render_context['AMN'].render(context))
return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
nodelist = parser.parse()
return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, filename=None, alias=None, mode=None, *args):
'''
{% asset alias mode=? ... %}
{% asset file.js ... %}
{% asset name depends depends... %}
alias = short name for asset
file = static relative filename
mode = asset mode [inferred from filename extension]
args == dependencies [aliases or files]
'''
if alias is None and filename is None:
raise template.TemplateSyntaxError(
'asset tag requires at least one of name or alias'
)
if filename is None and mode is None:
raise template.TemplateSyntaxError(
'asset tag requires mode when using an alias'
)
context.render_context['AMN'].add_asset(filename=filename, alias=alias, mode=mode, deps=args)
return ''
|
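For context, a sketch of how the renamed tag might be used. Because the {% assets %} tag calls parser.parse() with no stop token, it consumes the remainder of the template, renders it first, and then prepends the collected tags. The file and alias names below are invented for illustration:

# Hypothetical template exercising the tags above (names are assumptions).
TEMPLATE = '''{% load damn %}{% assets %}
{% asset "js/app.js" %}                {# static-relative file; mode inferred from .js #}
{% asset alias="jquery" mode="js" %}   {# an alias must carry an explicit mode #}
'''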
7769e5ddd5784b7e56b75fc33f25b0f40ecaa99e
|
cryptex/exchange/__init__.py
|
cryptex/exchange/__init__.py
|
from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy
from cryptex.exchange.btce import BTCE
|
from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy, CryptsyPublic
from cryptex.exchange.btce import BTCE, BTCEPublic
|
Add public imports to exchange module
|
Add public imports to exchange module
|
Python
|
mit
|
coink/cryptex
|
from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy
from cryptex.exchange.btce import BTCE
Add public imports to exchange module
|
from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy, CryptsyPublic
from cryptex.exchange.btce import BTCE, BTCEPublic
|
<commit_before>from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy
from cryptex.exchange.btce import BTCE
<commit_msg>Add public imports to exchange module<commit_after>
|
from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy, CryptsyPublic
from cryptex.exchange.btce import BTCE, BTCEPublic
|
from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy
from cryptex.exchange.btce import BTCE
Add public imports to exchange modulefrom cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy, CryptsyPublic
from cryptex.exchange.btce import BTCE, BTCEPublic
|
<commit_before>from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy
from cryptex.exchange.btce import BTCE
<commit_msg>Add public imports to exchange module<commit_after>from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy, CryptsyPublic
from cryptex.exchange.btce import BTCE, BTCEPublic
|
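A hedged usage sketch of what the widened imports enable; only the import paths come from the commit, and the constructor signatures below are assumptions:

from cryptex.exchange import BTCEPublic, CryptsyPublic

public = BTCEPublic()  # public market-data client; assumed to need no credentials
# BTCE / Cryptsy (also importable above) would still take API keys for
# private, authenticated endpoints.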
88f341c6a9d079c89537feb1fb0aa8908732421a
|
evennia/server/migrations/0002_auto_20190128_1820.py
|
evennia/server/migrations/0002_auto_20190128_1820.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes
def migrate_serverconf(apps, schema_editor):
"""
Move server conf from a custom binary field into a PickleObjectField
"""
ServerConfig = apps.get_model("server", "ServerConfig")
for conf in ServerConfig.objects.all():
value = pickle.loads(to_bytes(conf.db_value))
conf.db_value2 = value
conf.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='serverconfig',
name='db_value2',
field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
),
# migrate data
migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes, to_str
def migrate_serverconf(apps, schema_editor):
"""
Move server conf from a custom binary field into a PickleObjectField
"""
ServerConfig = apps.get_model("server", "ServerConfig")
for conf in ServerConfig.objects.all():
value = pickle.loads(to_bytes(conf.db_value))
conf.db_value2 = to_str(value)
conf.save(update_fields=["db_value2"])
class Migration(migrations.Migration):
dependencies = [
('server', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='serverconfig',
name='db_value2',
field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
),
# migrate data
migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
]
|
Fix migration for various situations
|
Fix migration for various situations
|
Python
|
bsd-3-clause
|
jamesbeebop/evennia,jamesbeebop/evennia,jamesbeebop/evennia
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes
def migrate_serverconf(apps, schema_editor):
"""
Move server conf from a custom binary field into a PickleObjectField
"""
ServerConfig = apps.get_model("server", "ServerConfig")
for conf in ServerConfig.objects.all():
value = pickle.loads(to_bytes(conf.db_value))
conf.db_value2 = value
conf.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='serverconfig',
name='db_value2',
field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
),
# migrate data
migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
]
Fix migration for various situations
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes, to_str
def migrate_serverconf(apps, schema_editor):
"""
Move server conf from a custom binary field into a PickleObjectField
"""
ServerConfig = apps.get_model("server", "ServerConfig")
for conf in ServerConfig.objects.all():
value = pickle.loads(to_bytes(conf.db_value))
conf.db_value2 = to_str(value)
conf.save(update_fields=["db_value2"])
class Migration(migrations.Migration):
dependencies = [
('server', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='serverconfig',
name='db_value2',
field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
),
# migrate data
migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes
def migrate_serverconf(apps, schema_editor):
"""
Move server conf from a custom binary field into a PickleObjectField
"""
ServerConfig = apps.get_model("server", "ServerConfig")
for conf in ServerConfig.objects.all():
value = pickle.loads(to_bytes(conf.db_value))
conf.db_value2 = value
conf.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='serverconfig',
name='db_value2',
field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
),
# migrate data
migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
]
<commit_msg>Fix migration for various situations<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes, to_str
def migrate_serverconf(apps, schema_editor):
"""
Move server conf from a custom binary field into a PickleObjectField
"""
ServerConfig = apps.get_model("server", "ServerConfig")
for conf in ServerConfig.objects.all():
value = pickle.loads(to_bytes(conf.db_value))
conf.db_value2 = to_str(value)
conf.save(update_fields=["db_value2"])
class Migration(migrations.Migration):
dependencies = [
('server', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='serverconfig',
name='db_value2',
field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
),
# migrate data
migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes
def migrate_serverconf(apps, schema_editor):
"""
Move server conf from a custom binary field into a PickleObjectField
"""
ServerConfig = apps.get_model("server", "ServerConfig")
for conf in ServerConfig.objects.all():
value = pickle.loads(to_bytes(conf.db_value))
conf.db_value2 = value
conf.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='serverconfig',
name='db_value2',
field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
),
# migrate data
migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
]
Fix migration for various situations# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes, to_str
def migrate_serverconf(apps, schema_editor):
"""
Move server conf from a custom binary field into a PickleObjectField
"""
ServerConfig = apps.get_model("server", "ServerConfig")
for conf in ServerConfig.objects.all():
value = pickle.loads(to_bytes(conf.db_value))
conf.db_value2 = to_str(value)
conf.save(update_fields=["db_value2"])
class Migration(migrations.Migration):
dependencies = [
('server', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='serverconfig',
name='db_value2',
field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
),
# migrate data
migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes
def migrate_serverconf(apps, schema_editor):
"""
Move server conf from a custom binary field into a PickleObjectField
"""
ServerConfig = apps.get_model("server", "ServerConfig")
for conf in ServerConfig.objects.all():
value = pickle.loads(to_bytes(conf.db_value))
conf.db_value2 = value
conf.save()
class Migration(migrations.Migration):
dependencies = [
('server', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='serverconfig',
name='db_value2',
field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
),
# migrate data
migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
]
<commit_msg>Fix migration for various situations<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes, to_str
def migrate_serverconf(apps, schema_editor):
"""
Move server conf from a custom binary field into a PickleObjectField
"""
ServerConfig = apps.get_model("server", "ServerConfig")
for conf in ServerConfig.objects.all():
value = pickle.loads(to_bytes(conf.db_value))
conf.db_value2 = to_str(value)
conf.save(update_fields=["db_value2"])
class Migration(migrations.Migration):
dependencies = [
('server', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='serverconfig',
name='db_value2',
field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
),
# migrate data
migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
]
|
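The migration above follows the standard two-step Django pattern: AddField creates the new column, then RunPython copies data forward, with RunPython.noop as the reverse so the migration can still be unapplied. A minimal generic sketch of that pattern, with invented app, model and field names:

from django.db import migrations

def copy_forward(apps, schema_editor):
    # Always use the historical model from apps, never a direct import.
    Thing = apps.get_model("myapp", "Thing")
    for obj in Thing.objects.all():
        obj.new_field = obj.old_field
        obj.save(update_fields=["new_field"])  # touch only the new column

class Migration(migrations.Migration):
    dependencies = [("myapp", "0001_initial")]
    operations = [
        migrations.RunPython(copy_forward, migrations.RunPython.noop),
    ]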
deb91e9f1e3c7332c005ca498f1c6bc79cf59b34
|
ansible-tests/validations-api.py
|
ansible-tests/validations-api.py
|
#!/usr/bin/env python
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return "Hello World!"
app.run(debug=True)
|
#!/usr/bin/env python
from flask import Flask, jsonify
app = Flask(__name__)
@app.route('/')
def index():
return jsonify({"msg": "Hello World!"})
@app.route('/v1/validations/')
def list_validations():
return jsonify({"TODO": "List existing validations"})
@app.route('/v1/validations/<uuid>/')
def show_validation(uuid):
return jsonify({
'uuid': uuid,
'TODO': "return validation info",
})
@app.route('/v1/validations/<uuid>/run', methods=['PUT'])
def run_validation(uuid):
return jsonify({
'TODO': "run the given validation",
})
app.run(debug=True)
|
Add the basic validation routes
|
Add the basic validation routes
|
Python
|
apache-2.0
|
coolsvap/clapper,coolsvap/clapper,rthallisey/clapper,coolsvap/clapper,rthallisey/clapper
|
#!/usr/bin/env python
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return "Hello World!"
app.run(debug=True)
Add the basic validation routes
|
#!/usr/bin/env python
from flask import Flask, jsonify
app = Flask(__name__)
@app.route('/')
def index():
return jsonify({"msg": "Hello World!"})
@app.route('/v1/validations/')
def list_validations():
return jsonify({"TODO": "List existing validations"})
@app.route('/v1/validations/<uuid>/')
def show_validation(uuid):
return jsonify({
'uuid': uuid,
'TODO': "return validation info",
})
@app.route('/v1/validations/<uuid>/run', methods=['PUT'])
def run_validation(uuid):
return jsonify({
'TODO': "run the given validation",
})
app.run(debug=True)
|
<commit_before>#!/usr/bin/env python
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return "Hello World!"
app.run(debug=True)
<commit_msg>Add the basic validation routes<commit_after>
|
#!/usr/bin/env python
from flask import Flask, jsonify
app = Flask(__name__)
@app.route('/')
def index():
return jsonify({"msg": "Hello World!"})
@app.route('/v1/validations/')
def list_validations():
return jsonify({"TODO": "List existing validations"})
@app.route('/v1/validations/<uuid>/')
def show_validation(uuid):
return jsonify({
'uuid': uuid,
'TODO': "return validation info",
})
@app.route('/v1/validations/<uuid>/run', methods=['PUT'])
def run_validation(uuid):
return jsonify({
'TODO': "run the given validation",
})
app.run(debug=True)
|
#!/usr/bin/env python
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return "Hello World!"
app.run(debug=True)
Add the basic validation routes#!/usr/bin/env python
from flask import Flask, jsonify
app = Flask(__name__)
@app.route('/')
def index():
return jsonify({"msg": "Hello World!"})
@app.route('/v1/validations/')
def list_validations():
return jsonify({"TODO": "List existing validations"})
@app.route('/v1/validations/<uuid>/')
def show_validation(uuid):
return jsonify({
'uuid': uuid,
'TODO': "return validation info",
})
@app.route('/v1/validations/<uuid>/run', methods=['PUT'])
def run_validation(uuid):
return jsonify({
'TODO': "run the given validation",
})
app.run(debug=True)
|
<commit_before>#!/usr/bin/env python
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return "Hello World!"
app.run(debug=True)
<commit_msg>Add the basic validation routes<commit_after>#!/usr/bin/env python
from flask import Flask, jsonify
app = Flask(__name__)
@app.route('/')
def index():
return jsonify({"msg": "Hello World!"})
@app.route('/v1/validations/')
def list_validations():
return jsonify({"TODO": "List existing validations"})
@app.route('/v1/validations/<uuid>/')
def show_validation(uuid):
return jsonify({
'uuid': uuid,
'TODO': "return validation info",
})
@app.route('/v1/validations/<uuid>/run', methods=['PUT'])
def run_validation(uuid):
return jsonify({
'TODO': "run the given validation",
})
app.run(debug=True)
|
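The stub routes can be exercised without starting a server via Flask's test client, assuming the trailing app.run(debug=True) is first moved behind an if __name__ == '__main__' guard (as written it runs on import). A sketch, using Response.get_json() from Flask >= 1.0:

client = app.test_client()
print(client.get("/v1/validations/").get_json())          # {"TODO": "List existing validations"}
print(client.put("/v1/validations/1234/run").get_json())  # {"TODO": "run the given validation"}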
f3dd0c94c0c7be2a5ebc2c0df59dd9fb15969eb9
|
ghpythonremote/_configure_ironpython_installation.py
|
ghpythonremote/_configure_ironpython_installation.py
|
import sys
import pip
from .helpers import get_rhino_ironpython_path
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary :all:', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
|
import sys
import pip
import logging
from .helpers import get_rhino_ironpython_path
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary all', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
|
Correct --no-binary option, incorrect formatting in pypi doc
|
Correct --no-binary option, incorrect formatting in pypi doc
|
Python
|
mit
|
Digital-Structures/ghpythonremote,pilcru/ghpythonremote
|
import sys
import pip
from .helpers import get_rhino_ironpython_path
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary :all:', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
Correct --no-binary option, incorrect formatting in pypi doc
|
import sys
import pip
import logging
from .helpers import get_rhino_ironpython_path
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary all', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
|
<commit_before>import sys
import pip
from .helpers import get_rhino_ironpython_path
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary :all:', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
<commit_msg>Correct --no-binary option, incorrect formatting in pypi doc<commit_after>
|
import sys
import pip
import logging
from .helpers import get_rhino_ironpython_path
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary all', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
|
import sys
import pip
from .helpers import get_rhino_ironpython_path
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary :all:', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
Correct --no-binary option, incorrect formatting in pypi docimport sys
import pip
import logging
from .helpers import get_rhino_ironpython_path
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary all', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
|
<commit_before>import sys
import pip
from .helpers import get_rhino_ironpython_path
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary :all:', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
<commit_msg>Correct --no-binary option, incorrect formatting in pypi doc<commit_after>import sys
import pip
import logging
from .helpers import get_rhino_ironpython_path
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
if __name__ == '__main__':
location = None
if len(sys.argv) > 1:
location = sys.argv[1]
rhino_ironpython_path = get_rhino_ironpython_path(location=location)
package_name = __package__.split('.')[0]
pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
'--upgrade', '--no-binary all', '--no-compile', '--ignore-requires-python']
print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
print('pip ' + ' '.join(pip_cmd))
pip.main(pip_cmd)
|
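One caveat worth noting: pip.main() was removed from pip's public API in pip 10, so scripts written this way eventually break. A more future-proof sketch issues the same command through a subprocess (the target path is a placeholder for whatever get_rhino_ironpython_path() returns):

import subprocess
import sys

target = "C:/placeholder/IronPython/Lib/site-packages"  # stand-in for the helper's result
subprocess.check_call([
    sys.executable, "-m", "pip", "install", "ghpythonremote",
    "--target", target, "--upgrade",
    "--no-binary", "all", "--no-compile", "--ignore-requires-python",
])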
e821236194b7e6f132c9fd08758b751edd8f0fc8
|
Python/ProcBridge/example/client.py
|
Python/ProcBridge/example/client.py
|
import procbridge
# from procbridge import procbridge
host = '127.0.0.1'
port = 8077
client = procbridge.ProcBridge(host, port)
print(client.request('echo', {}))
print(client.request('add', {
'elements': [1, 2, 3, 4, 5]
}))
|
import procbridge
# from procbridge import procbridge
host = '127.0.0.1'
port = 8877
client = procbridge.ProcBridge(host, port)
print(client.request('echo', {}))
print(client.request('add', {
'elements': [1, 2, 3, 4, 5]
}))
|
Make port number the same in both Java and Python examples
|
Make port number the same in both Java and Python examples
|
Python
|
mit
|
gongzhang/proc-bridge,gongzhang/proc-bridge
|
import procbridge
# from procbridge import procbridge
host = '127.0.0.1'
port = 8077
client = procbridge.ProcBridge(host, port)
print(client.request('echo', {}))
print(client.request('add', {
'elements': [1, 2, 3, 4, 5]
}))
Make port number the same in both Java and Python examples
|
import procbridge
# from procbridge import procbridge
host = '127.0.0.1'
port = 8877
client = procbridge.ProcBridge(host, port)
print(client.request('echo', {}))
print(client.request('add', {
'elements': [1, 2, 3, 4, 5]
}))
|
<commit_before>import procbridge
# from procbridge import procbridge
host = '127.0.0.1'
port = 8077
client = procbridge.ProcBridge(host, port)
print(client.request('echo', {}))
print(client.request('add', {
'elements': [1, 2, 3, 4, 5]
}))
<commit_msg>Make port number the same in both Java and Python examples<commit_after>
|
import procbridge
# from procbridge import procbridge
host = '127.0.0.1'
port = 8877
client = procbridge.ProcBridge(host, port)
print(client.request('echo', {}))
print(client.request('add', {
'elements': [1, 2, 3, 4, 5]
}))
|
import procbridge
# from procbridge import procbridge
host = '127.0.0.1'
port = 8077
client = procbridge.ProcBridge(host, port)
print(client.request('echo', {}))
print(client.request('add', {
'elements': [1, 2, 3, 4, 5]
}))
Make port number the same in both Java and Python examplesimport procbridge
# from procbridge import procbridge
host = '127.0.0.1'
port = 8877
client = procbridge.ProcBridge(host, port)
print(client.request('echo', {}))
print(client.request('add', {
'elements': [1, 2, 3, 4, 5]
}))
|
<commit_before>import procbridge
# from procbridge import procbridge
host = '127.0.0.1'
port = 8077
client = procbridge.ProcBridge(host, port)
print(client.request('echo', {}))
print(client.request('add', {
'elements': [1, 2, 3, 4, 5]
}))
<commit_msg>Make port number the same in both Java and Python examples<commit_after>import procbridge
# from procbridge import procbridge
host = '127.0.0.1'
port = 8877
client = procbridge.ProcBridge(host, port)
print(client.request('echo', {}))
print(client.request('add', {
'elements': [1, 2, 3, 4, 5]
}))
|
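To keep the paired examples from drifting apart again, the Python side could hoist the endpoint into one shared module (a sketch; the real examples hard-code the values, and the Java example would still need the constant mirrored by hand):

# example_config.py -- hypothetical shared module
HOST = '127.0.0.1'
PORT = 8877

# client.py would then read:
#   from example_config import HOST, PORT
#   client = procbridge.ProcBridge(HOST, PORT)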
b9dd3a5d2f52f6cebb55b322cf4ddb2b9e1d8ccc
|
arches/db/install/truncate_db.py
|
arches/db/install/truncate_db.py
|
import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
context = Context(database_settings)
postgres_version = subprocess.check_output(["psql", "--version"])
if int(postgres_version.split('.')[1]) >= 2:
context['PID'] = "pid"
else:
context['PID'] = "procpid"
t = Template(
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ POSTGIS_TEMPLATE }}';\n"
"\n"
"DROP DATABASE IF EXISTS {{ NAME }};\n"
"\n"
"CREATE DATABASE {{ NAME }}\n"
" WITH ENCODING='UTF8'\n"
" OWNER={{ USER }}\n"
" TEMPLATE={{POSTGIS_TEMPLATE}}\n"
" CONNECTION LIMIT=-1;\n"
"\n"
)
utils.write_to_file(path_to_file, t.render(context));
|
import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
context = Context(database_settings)
postgres_version = subprocess.check_output(["psql", "--version"])
if int(postgres_version.split('.')[1]) >= 2:
context['PID'] = "pid"
else:
context['PID'] = "procpid"
t = Template(
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
"\n"
"DROP DATABASE IF EXISTS {{ NAME }};\n"
"\n"
"CREATE DATABASE {{ NAME }}\n"
" WITH ENCODING='UTF8'\n"
" OWNER={{ USER }}\n"
" CONNECTION LIMIT=-1;\n"
"\n"
)
utils.write_to_file(path_to_file, t.render(context));
|
Remove reference to postgis template. Django now installs postgis when database is created.
|
Remove reference to postgis template. Django now installs postgis when database is created.
|
Python
|
agpl-3.0
|
archesproject/arches,cvast/arches,cvast/arches,archesproject/arches,cvast/arches,cvast/arches,archesproject/arches,archesproject/arches
|
import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
context = Context(database_settings)
postgres_version = subprocess.check_output(["psql", "--version"])
if int(postgres_version.split('.')[1]) >= 2:
context['PID'] = "pid"
else:
context['PID'] = "procpid"
t = Template(
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ POSTGIS_TEMPLATE }}';\n"
"\n"
"DROP DATABASE IF EXISTS {{ NAME }};\n"
"\n"
"CREATE DATABASE {{ NAME }}\n"
" WITH ENCODING='UTF8'\n"
" OWNER={{ USER }}\n"
" TEMPLATE={{POSTGIS_TEMPLATE}}\n"
" CONNECTION LIMIT=-1;\n"
"\n"
)
utils.write_to_file(path_to_file, t.render(context));
Remove reference to postgis template. Django now installs postgis when database is created.
|
import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
context = Context(database_settings)
postgres_version = subprocess.check_output(["psql", "--version"])
if int(postgres_version.split('.')[1]) >= 2:
context['PID'] = "pid"
else:
context['PID'] = "procpid"
t = Template(
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
"\n"
"DROP DATABASE IF EXISTS {{ NAME }};\n"
"\n"
"CREATE DATABASE {{ NAME }}\n"
" WITH ENCODING='UTF8'\n"
" OWNER={{ USER }}\n"
" CONNECTION LIMIT=-1;\n"
"\n"
)
utils.write_to_file(path_to_file, t.render(context));
|
<commit_before>import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
context = Context(database_settings)
postgres_version = subprocess.check_output(["psql", "--version"])
if int(postgres_version.split('.')[1]) >= 2:
context['PID'] = "pid"
else:
context['PID'] = "procpid"
t = Template(
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ POSTGIS_TEMPLATE }}';\n"
"\n"
"DROP DATABASE IF EXISTS {{ NAME }};\n"
"\n"
"CREATE DATABASE {{ NAME }}\n"
" WITH ENCODING='UTF8'\n"
" OWNER={{ USER }}\n"
" TEMPLATE={{POSTGIS_TEMPLATE}}\n"
" CONNECTION LIMIT=-1;\n"
"\n"
)
utils.write_to_file(path_to_file, t.render(context));
<commit_msg>Remove reference to postgis template. Django now installs postgis when database is created.<commit_after>
|
import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
context = Context(database_settings)
postgres_version = subprocess.check_output(["psql", "--version"])
if int(postgres_version.split('.')[1]) >= 2:
context['PID'] = "pid"
else:
context['PID'] = "procpid"
t = Template(
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
"\n"
"DROP DATABASE IF EXISTS {{ NAME }};\n"
"\n"
"CREATE DATABASE {{ NAME }}\n"
" WITH ENCODING='UTF8'\n"
" OWNER={{ USER }}\n"
" CONNECTION LIMIT=-1;\n"
"\n"
)
utils.write_to_file(path_to_file, t.render(context));
|
import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
context = Context(database_settings)
postgres_version = subprocess.check_output(["psql", "--version"])
if int(postgres_version.split('.')[1]) >= 2:
context['PID'] = "pid"
else:
context['PID'] = "procpid"
t = Template(
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ POSTGIS_TEMPLATE }}';\n"
"\n"
"DROP DATABASE IF EXISTS {{ NAME }};\n"
"\n"
"CREATE DATABASE {{ NAME }}\n"
" WITH ENCODING='UTF8'\n"
" OWNER={{ USER }}\n"
" TEMPLATE={{POSTGIS_TEMPLATE}}\n"
" CONNECTION LIMIT=-1;\n"
"\n"
)
utils.write_to_file(path_to_file, t.render(context));
Remove reference to postgis template. Django now installs postgis when database is created.import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
context = Context(database_settings)
postgres_version = subprocess.check_output(["psql", "--version"])
if int(postgres_version.split('.')[1]) >= 2:
context['PID'] = "pid"
else:
context['PID'] = "procpid"
t = Template(
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
"\n"
"DROP DATABASE IF EXISTS {{ NAME }};\n"
"\n"
"CREATE DATABASE {{ NAME }}\n"
" WITH ENCODING='UTF8'\n"
" OWNER={{ USER }}\n"
" CONNECTION LIMIT=-1;\n"
"\n"
)
utils.write_to_file(path_to_file, t.render(context));
|
<commit_before>import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
context = Context(database_settings)
postgres_version = subprocess.check_output(["psql", "--version"])
if int(postgres_version.split('.')[1]) >= 2:
context['PID'] = "pid"
else:
context['PID'] = "procpid"
t = Template(
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ POSTGIS_TEMPLATE }}';\n"
"\n"
"DROP DATABASE IF EXISTS {{ NAME }};\n"
"\n"
"CREATE DATABASE {{ NAME }}\n"
" WITH ENCODING='UTF8'\n"
" OWNER={{ USER }}\n"
" TEMPLATE={{POSTGIS_TEMPLATE}}\n"
" CONNECTION LIMIT=-1;\n"
"\n"
)
utils.write_to_file(path_to_file, t.render(context));
<commit_msg>Remove reference to postgis template. Django now installs postgis when database is created.<commit_after>import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
context = Context(database_settings)
postgres_version = subprocess.check_output(["psql", "--version"])
if int(postgres_version.split('.')[1]) >= 2:
context['PID'] = "pid"
else:
context['PID'] = "procpid"
t = Template(
"SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
"\n"
"DROP DATABASE IF EXISTS {{ NAME }};\n"
"\n"
"CREATE DATABASE {{ NAME }}\n"
" WITH ENCODING='UTF8'\n"
" OWNER={{ USER }}\n"
" CONNECTION LIMIT=-1;\n"
"\n"
)
utils.write_to_file(path_to_file, t.render(context));
|
48a92a395967aa6a0171495e80566910f76fd06c
|
kino/functions/github.py
|
kino/functions/github.py
|
import datetime
from github import Github
from slack.slackbot import SlackerAdapter
from utils.config import Config
from utils.resource import MessageResource
class GithubManager(object):
def __init__(self):
self.config = Config().github
self.username = self.config["USERNAME"]
password = self.config["PASSWORD"]
self.github = Github(self.username, password)
self.slackbot = SlackerAdapter()
def daily_commit_check(self, channel="#personal_assistant"):
today = datetime.datetime.today()
today_date = datetime.datetime(today.year, today.month, today.day)
today_date_ko = today_date - datetime.timedelta(hours=9)
commit_events = []
for event in self.github.get_user(self.username).get_events():
if event.created_at > today_date_ko:
if event.type in ['PushEvent', 'PullRequestEvent']:
commit_events.append(event)
else:
break
if len(commit_events) == 0:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EMPTY)
else:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EXIST + len(commit_events))
|
import datetime
from github import Github
from slack.slackbot import SlackerAdapter
from utils.config import Config
from utils.resource import MessageResource
class GithubManager(object):
def __init__(self):
self.config = Config().github
self.username = self.config["USERNAME"]
password = self.config["PASSWORD"]
self.github = Github(self.username, password)
self.slackbot = SlackerAdapter()
def daily_commit_check(self, channel="#personal_assistant"):
today = datetime.datetime.today()
today_date = datetime.datetime(today.year, today.month, today.day)
today_date_ko = today_date - datetime.timedelta(hours=9)
commit_events = []
for event in self.github.get_user(self.username).get_events():
if event.created_at > today_date_ko:
if event.type in ['PushEvent', 'PullRequestEvent']:
commit_events.append(event)
else:
break
if len(commit_events) == 0:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EMPTY)
else:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EXIST + str(len(commit_events)))
|
Modify commit count str format
|
Modify commit count str format
|
Python
|
mit
|
DongjunLee/kino-bot
|
import datetime
from github import Github
from slack.slackbot import SlackerAdapter
from utils.config import Config
from utils.resource import MessageResource
class GithubManager(object):
def __init__(self):
self.config = Config().github
self.username = self.config["USERNAME"]
password = self.config["PASSWORD"]
self.github = Github(self.username, password)
self.slackbot = SlackerAdapter()
def daily_commit_check(self, channel="#personal_assistant"):
today = datetime.datetime.today()
today_date = datetime.datetime(today.year, today.month, today.day)
today_date_ko = today_date - datetime.timedelta(hours=9)
commit_events = []
for event in self.github.get_user(self.username).get_events():
if event.created_at > today_date_ko:
if event.type in ['PushEvent', 'PullRequestEvent']:
commit_events.append(event)
else:
break
if len(commit_events) == 0:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EMPTY)
else:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EXIST + len(commit_events))
Modify commit count str format
|
import datetime
from github import Github
from slack.slackbot import SlackerAdapter
from utils.config import Config
from utils.resource import MessageResource
class GithubManager(object):
def __init__(self):
self.config = Config().github
self.username = self.config["USERNAME"]
password = self.config["PASSWORD"]
self.github = Github(self.username, password)
self.slackbot = SlackerAdapter()
def daily_commit_check(self, channel="#personal_assistant"):
today = datetime.datetime.today()
today_date = datetime.datetime(today.year, today.month, today.day)
today_date_ko = today_date - datetime.timedelta(hours=9)
commit_events = []
for event in self.github.get_user(self.username).get_events():
if event.created_at > today_date_ko:
if event.type in ['PushEvent', 'PullRequestEvent']:
commit_events.append(event)
else:
break
if len(commit_events) == 0:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EMPTY)
else:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EXIST + str(len(commit_events)))
|
<commit_before>import datetime
from github import Github
from slack.slackbot import SlackerAdapter
from utils.config import Config
from utils.resource import MessageResource
class GithubManager(object):
def __init__(self):
self.config = Config().github
self.username = self.config["USERNAME"]
password = self.config["PASSWORD"]
self.github = Github(self.username, password)
self.slackbot = SlackerAdapter()
def daily_commit_check(self, channel="#personal_assistant"):
today = datetime.datetime.today()
today_date = datetime.datetime(today.year, today.month, today.day)
today_date_ko = today_date - datetime.timedelta(hours=9)
commit_events = []
for event in self.github.get_user(self.username).get_events():
if event.created_at > today_date_ko:
if event.type in ['PushEvent', 'PullRequestEvent']:
commit_events.append(event)
else:
break
if len(commit_events) == 0:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EMPTY)
else:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EXIST + len(commit_events))
<commit_msg>Modify commit count str format<commit_after>
|
import datetime
from github import Github
from slack.slackbot import SlackerAdapter
from utils.config import Config
from utils.resource import MessageResource
class GithubManager(object):
def __init__(self):
self.config = Config().github
self.username = self.config["USERNAME"]
password = self.config["PASSWORD"]
self.github = Github(self.username, password)
self.slackbot = SlackerAdapter()
def daily_commit_check(self, channel="#personal_assistant"):
today = datetime.datetime.today()
today_date = datetime.datetime(today.year, today.month, today.day)
today_date_ko = today_date - datetime.timedelta(hours=9)
commit_events = []
for event in self.github.get_user(self.username).get_events():
if event.created_at > today_date_ko:
if event.type in ['PushEvent', 'PullRequestEvent']:
commit_events.append(event)
else:
break
if len(commit_events) == 0:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EMPTY)
else:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EXIST + str(len(commit_events)))
|
import datetime
from github import Github
from slack.slackbot import SlackerAdapter
from utils.config import Config
from utils.resource import MessageResource
class GithubManager(object):
def __init__(self):
self.config = Config().github
self.username = self.config["USERNAME"]
password = self.config["PASSWORD"]
self.github = Github(self.username, password)
self.slackbot = SlackerAdapter()
def daily_commit_check(self, channel="#personal_assistant"):
today = datetime.datetime.today()
today_date = datetime.datetime(today.year, today.month, today.day)
today_date_ko = today_date - datetime.timedelta(hours=9)
commit_events = []
for event in self.github.get_user(self.username).get_events():
if event.created_at > today_date_ko:
if event.type in ['PushEvent', 'PullRequestEvent']:
commit_events.append(event)
else:
break
if len(commit_events) == 0:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EMPTY)
else:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EXIST + len(commit_events))
Modify commit count str formatimport datetime
from github import Github
from slack.slackbot import SlackerAdapter
from utils.config import Config
from utils.resource import MessageResource
class GithubManager(object):
def __init__(self):
self.config = Config().github
self.username = self.config["USERNAME"]
password = self.config["PASSWORD"]
self.github = Github(self.username, password)
self.slackbot = SlackerAdapter()
def daily_commit_check(self, channel="#personal_assistant"):
today = datetime.datetime.today()
today_date = datetime.datetime(today.year, today.month, today.day)
today_date_ko = today_date - datetime.timedelta(hours=9)
commit_events = []
for event in self.github.get_user(self.username).get_events():
if event.created_at > today_date_ko:
if event.type in ['PushEvent', 'PullRequestEvent']:
commit_events.append(event)
else:
break
if len(commit_events) == 0:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EMPTY)
else:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EXIST + str(len(commit_events)))
|
<commit_before>import datetime
from github import Github
from slack.slackbot import SlackerAdapter
from utils.config import Config
from utils.resource import MessageResource
class GithubManager(object):
def __init__(self):
self.config = Config().github
self.username = self.config["USERNAME"]
password = self.config["PASSWORD"]
self.github = Github(self.username, password)
self.slackbot = SlackerAdapter()
def daily_commit_check(self, channel="#personal_assistant"):
today = datetime.datetime.today()
today_date = datetime.datetime(today.year, today.month, today.day)
today_date_ko = today_date - datetime.timedelta(hours=9)
commit_events = []
for event in self.github.get_user(self.username).get_events():
if event.created_at > today_date_ko:
if event.type in ['PushEvent', 'PullRequestEvent']:
commit_events.append(event)
else:
break
if len(commit_events) == 0:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EMPTY)
else:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EXIST + len(commit_events))
<commit_msg>Modify commit count str format<commit_after>import datetime
from github import Github
from slack.slackbot import SlackerAdapter
from utils.config import Config
from utils.resource import MessageResource
class GithubManager(object):
def __init__(self):
self.config = Config().github
self.username = self.config["USERNAME"]
password = self.config["PASSWORD"]
self.github = Github(self.username, password)
self.slackbot = SlackerAdapter()
def daily_commit_check(self, channel="#personal_assistant"):
today = datetime.datetime.today()
today_date = datetime.datetime(today.year, today.month, today.day)
today_date_ko = today_date - datetime.timedelta(hours=9)
commit_events = []
for event in self.github.get_user(self.username).get_events():
if event.created_at > today_date_ko:
if event.type in ['PushEvent', 'PullRequestEvent']:
commit_events.append(event)
else:
break
if len(commit_events) == 0:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EMPTY)
else:
self.slackbot.send_message(channel=channel, text=MessageResource.GITHUB_COMMIT_EXIST + str(len(commit_events)))
|
859c67eff781285473b92f6e363c9c3a3aaed33e
|
ExplorerTest.py
|
ExplorerTest.py
|
import time
import spi_serial
if __name__ == "__main__":
ss = spi_serial.SpiSerial()
time.sleep(3)
cmd = [1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [2]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [8,1,1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
|
import time
import spi_serial
if __name__ == "__main__":
ss = spi_serial.SpiSerial()
time.sleep(3)
cmd = [1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [2]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [8,1,1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
|
Fix tab (change to spaces)
|
Fix tab (change to spaces)
Fixes:
$ python ExplorerTest.py
File "ExplorerTest.py", line 6
time.sleep(3)
^
IndentationError: unexpected indent
|
Python
|
mit
|
EnhancedRadioDevices/915MHzEdisonExplorer_SW
|
import time
import spi_serial
if __name__ == "__main__":
ss = spi_serial.SpiSerial()
time.sleep(3)
cmd = [1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [2]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [8,1,1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
Fix tab (change to spaces)
Fixes:
$ python ExplorerTest.py
File "ExplorerTest.py", line 6
time.sleep(3)
^
IndentationError: unexpected indent
|
import time
import spi_serial
if __name__ == "__main__":
ss = spi_serial.SpiSerial()
time.sleep(3)
cmd = [1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [2]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [8,1,1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
|
<commit_before>import time
import spi_serial
if __name__ == "__main__":
ss = spi_serial.SpiSerial()
time.sleep(3)
cmd = [1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [2]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [8,1,1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
<commit_msg>Fix tab (change to spaces)
Fixes:
$ python ExplorerTest.py
File "ExplorerTest.py", line 6
time.sleep(3)
^
IndentationError: unexpected indent<commit_after>
|
import time
import spi_serial
if __name__ == "__main__":
ss = spi_serial.SpiSerial()
time.sleep(3)
cmd = [1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [2]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [8,1,1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
|
import time
import spi_serial
if __name__ == "__main__":
ss = spi_serial.SpiSerial()
time.sleep(3)
cmd = [1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [2]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [8,1,1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
Fix tab (change to spaces)
Fixes:
$ python ExplorerTest.py
File "ExplorerTest.py", line 6
time.sleep(3)
^
IndentationError: unexpected indentimport time
import spi_serial
if __name__ == "__main__":
ss = spi_serial.SpiSerial()
time.sleep(3)
cmd = [1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [2]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [8,1,1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
|
<commit_before>import time
import spi_serial
if __name__ == "__main__":
ss = spi_serial.SpiSerial()
time.sleep(3)
cmd = [1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [2]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [8,1,1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
<commit_msg>Fix tab (change to spaces)
Fixes:
$ python ExplorerTest.py
File "ExplorerTest.py", line 6
time.sleep(3)
^
IndentationError: unexpected indent<commit_after>import time
import spi_serial
if __name__ == "__main__":
ss = spi_serial.SpiSerial()
time.sleep(3)
cmd = [1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [2]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
cmd = [8,1,1]
ss.write(cmd)
if ss.inWaiting() > 0:
print(''.join(chr(k) for k in ss.read(0)))
|
7a308233707e7e024311a3767367875921c6217b
|
graphiter/models.py
|
graphiter/models.py
|
from django.db import models
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"")
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
|
from django.db import models
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"", blank=True)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
|
Add blank=True for Page.time_until field
|
Add blank=True for Page.time_until field
|
Python
|
bsd-2-clause
|
jwineinger/django-graphiter
|
from django.db import models
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"")
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
Add blank=True for Page.time_until field
|
from django.db import models
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"", blank=True)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
|
<commit_before>from django.db import models
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"")
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
<commit_msg>Add blank=True for Page.time_until field<commit_after>
|
from django.db import models
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"", blank=True)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
|
from django.db import models
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"")
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
Add blank=True for Page.time_until fieldfrom django.db import models
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"", blank=True)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
|
<commit_before>from django.db import models
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"")
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
<commit_msg>Add blank=True for Page.time_until field<commit_after>from django.db import models
class Chart(models.Model):
title = models.CharField(max_length=50)
url = models.CharField(max_length=1024)
def __unicode__(self):
return self.title
class Page(models.Model):
title = models.CharField(max_length=50)
slug = models.SlugField()
charts = models.ManyToManyField(Chart)
time_from = models.CharField(max_length=50, default=u"-24h")
time_until = models.CharField(max_length=50, default=u"", blank=True)
image_width = models.PositiveIntegerField(default=1200)
image_height = models.PositiveIntegerField(default=400)
def __unicode__(self):
return self.title
|
e8dae2888576fa2305cddda0e48f332276b176b5
|
app/__init__.py
|
app/__init__.py
|
#!flask/bin/python
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
from app import views, models
|
#!flask/bin/python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
from app import views, models
|
Update to flask_sqlalchemy to use python 3 import
|
Update to flask_sqlalchemy to use python 3 import
|
Python
|
agpl-3.0
|
lasa/website,lasa/website,lasa/website
|
#!flask/bin/python
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
from app import views, models
Update to flask_sqlalchemy to use python 3 import
|
#!flask/bin/python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
from app import views, models
|
<commit_before>#!flask/bin/python
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
from app import views, models
<commit_msg>Update to flask_sqlalchemy to use python 3 import<commit_after>
|
#!flask/bin/python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
from app import views, models
|
#!flask/bin/python
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
from app import views, models
Update to flask_sqlalchemy to use python 3 import#!flask/bin/python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
from app import views, models
|
<commit_before>#!flask/bin/python
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
from app import views, models
<commit_msg>Update to flask_sqlalchemy to use python 3 import<commit_after>#!flask/bin/python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
db = SQLAlchemy(app)
from app import views, models
|
fd697a0a4a4aeb3455ec7b7e8b3ed38ce0eb4502
|
test/sockettest.py
|
test/sockettest.py
|
import kaa
@kaa.coroutine()
def new_client(client):
ip, port = client.address
print 'New connection from %s:%s' % (ip, port)
#yield client.starttls_server()
client.write('Hello %s, connecting from port %d\n' % (ip, port))
remote = tls.TLSSocket()
#remote = kaa.Socket()
yield remote.connect('www.freevo.org:80')
#yield remote.connect('urandom.ca:443')
#try:
# yield remote.starttls_client()
#except:
# print "TLS ERROR"
# return
remote.write('GET / HTTP/1.0\n\n')
while remote.connected:
data = yield remote.read()
yield client.write(data)
client.write('\n\nBye!\n')
client.close()
from kaa.net import tls
#server = tls.TLSSocket()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
|
import logging
import kaa
from kaa.net.tls import TLSSocket
log = logging.getLogger('tls').ensureRootHandler()
@kaa.coroutine()
def new_client(client):
ip, port = client.peer[:2]
print 'New connection from %s:%s' % (ip, port)
#yield client.starttls_server()
client.write('Hello %s, connecting from port %d\n' % (ip, port))
remote = TLSSocket()
yield remote.connect('www.google.com:443')
yield remote.starttls_client()
yield remote.write('GET / HTTP/1.0\n\n')
while remote.readable:
data = yield remote.read()
yield client.write(data)
client.write('\n\nBye!\n')
client.close()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
|
Fix TLS support in socket test
|
Fix TLS support in socket test
|
Python
|
lgpl-2.1
|
freevo/kaa-base,freevo/kaa-base
|
import kaa
@kaa.coroutine()
def new_client(client):
ip, port = client.address
print 'New connection from %s:%s' % (ip, port)
#yield client.starttls_server()
client.write('Hello %s, connecting from port %d\n' % (ip, port))
remote = tls.TLSSocket()
#remote = kaa.Socket()
yield remote.connect('www.freevo.org:80')
#yield remote.connect('urandom.ca:443')
#try:
# yield remote.starttls_client()
#except:
# print "TLS ERROR"
# return
remote.write('GET / HTTP/1.0\n\n')
while remote.connected:
data = yield remote.read()
yield client.write(data)
client.write('\n\nBye!\n')
client.close()
from kaa.net import tls
#server = tls.TLSSocket()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
Fix TLS support in socket test
|
import logging
import kaa
from kaa.net.tls import TLSSocket
log = logging.getLogger('tls').ensureRootHandler()
@kaa.coroutine()
def new_client(client):
ip, port = client.peer[:2]
print 'New connection from %s:%s' % (ip, port)
#yield client.starttls_server()
client.write('Hello %s, connecting from port %d\n' % (ip, port))
remote = TLSSocket()
yield remote.connect('www.google.com:443')
yield remote.starttls_client()
yield remote.write('GET / HTTP/1.0\n\n')
while remote.readable:
data = yield remote.read()
yield client.write(data)
client.write('\n\nBye!\n')
client.close()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
|
<commit_before>import kaa
@kaa.coroutine()
def new_client(client):
ip, port = client.address
print 'New connection from %s:%s' % (ip, port)
#yield client.starttls_server()
client.write('Hello %s, connecting from port %d\n' % (ip, port))
remote = tls.TLSSocket()
#remote = kaa.Socket()
yield remote.connect('www.freevo.org:80')
#yield remote.connect('urandom.ca:443')
#try:
# yield remote.starttls_client()
#except:
# print "TLS ERROR"
# return
remote.write('GET / HTTP/1.0\n\n')
while remote.connected:
data = yield remote.read()
yield client.write(data)
client.write('\n\nBye!\n')
client.close()
from kaa.net import tls
#server = tls.TLSSocket()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
<commit_msg>Fix TLS support in socket test<commit_after>
|
import logging
import kaa
from kaa.net.tls import TLSSocket
log = logging.getLogger('tls').ensureRootHandler()
@kaa.coroutine()
def new_client(client):
ip, port = client.peer[:2]
print 'New connection from %s:%s' % (ip, port)
#yield client.starttls_server()
client.write('Hello %s, connecting from port %d\n' % (ip, port))
remote = TLSSocket()
yield remote.connect('www.google.com:443')
yield remote.starttls_client()
yield remote.write('GET / HTTP/1.0\n\n')
while remote.readable:
data = yield remote.read()
yield client.write(data)
client.write('\n\nBye!\n')
client.close()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
|
import kaa
@kaa.coroutine()
def new_client(client):
ip, port = client.address
print 'New connection from %s:%s' % (ip, port)
#yield client.starttls_server()
client.write('Hello %s, connecting from port %d\n' % (ip, port))
remote = tls.TLSSocket()
#remote = kaa.Socket()
yield remote.connect('www.freevo.org:80')
#yield remote.connect('urandom.ca:443')
#try:
# yield remote.starttls_client()
#except:
# print "TLS ERROR"
# return
remote.write('GET / HTTP/1.0\n\n')
while remote.connected:
data = yield remote.read()
yield client.write(data)
client.write('\n\nBye!\n')
client.close()
from kaa.net import tls
#server = tls.TLSSocket()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
Fix TLS support in socket testimport logging
import kaa
from kaa.net.tls import TLSSocket
log = logging.getLogger('tls').ensureRootHandler()
@kaa.coroutine()
def new_client(client):
ip, port = client.peer[:2]
print 'New connection from %s:%s' % (ip, port)
#yield client.starttls_server()
client.write('Hello %s, connecting from port %d\n' % (ip, port))
remote = TLSSocket()
yield remote.connect('www.google.com:443')
yield remote.starttls_client()
yield remote.write('GET / HTTP/1.0\n\n')
while remote.readable:
data = yield remote.read()
yield client.write(data)
client.write('\n\nBye!\n')
client.close()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
|
<commit_before>import kaa
@kaa.coroutine()
def new_client(client):
ip, port = client.address
print 'New connection from %s:%s' % (ip, port)
#yield client.starttls_server()
client.write('Hello %s, connecting from port %d\n' % (ip, port))
remote = tls.TLSSocket()
#remote = kaa.Socket()
yield remote.connect('www.freevo.org:80')
#yield remote.connect('urandom.ca:443')
#try:
# yield remote.starttls_client()
#except:
# print "TLS ERROR"
# return
remote.write('GET / HTTP/1.0\n\n')
while remote.connected:
data = yield remote.read()
yield client.write(data)
client.write('\n\nBye!\n')
client.close()
from kaa.net import tls
#server = tls.TLSSocket()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
<commit_msg>Fix TLS support in socket test<commit_after>import logging
import kaa
from kaa.net.tls import TLSSocket
log = logging.getLogger('tls').ensureRootHandler()
@kaa.coroutine()
def new_client(client):
ip, port = client.peer[:2]
print 'New connection from %s:%s' % (ip, port)
#yield client.starttls_server()
client.write('Hello %s, connecting from port %d\n' % (ip, port))
remote = TLSSocket()
yield remote.connect('www.google.com:443')
yield remote.starttls_client()
yield remote.write('GET / HTTP/1.0\n\n')
while remote.readable:
data = yield remote.read()
yield client.write(data)
client.write('\n\nBye!\n')
client.close()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
|
91c620e228ad73e2e34efbd60813ed35b3f9ef46
|
tests/test_dtool_dataset_freeze.py
|
tests/test_dtool_dataset_freeze.py
|
"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import freeze
runner = CliRunner()
# Create an empty dataset
dataset_name = "my_dataset"
dataset = DataSet(dataset_name, data_directory="data")
dataset.persist_to_path(".")
# Add some files to it.
dest_dir = os.path.join(".", dataset.data_directory, "sample_files")
shutil.copytree(SAMPLE_FILES_DIR, dest_dir)
# At this point the manifest has not been updated.
assert len(dataset.identifiers) == 0
result = runner.invoke(freeze, ["."])
assert result.exit_code == 0
# Manifest has been updated.
assert len(dataset.identifiers) == 2
|
"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet, ProtoDataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import create, freeze
runner = CliRunner()
dataset_name = "my_dataset"
result = runner.invoke(create, [dataset_name])
assert result.exit_code == 0
# At this point we have a proto dataset
dataset_abspath = os.path.abspath(dataset_name)
dataset_uri = "disk:{}".format(dataset_abspath)
dataset = ProtoDataSet.from_uri(dataset_uri)
# Add a file to the proto dataset.
sample_file_abspath = os.path.join(dataset_abspath, "data", "hello.txt")
with open(sample_file_abspath, "w") as fh:
fh.write("hello world")
result = runner.invoke(freeze, [dataset_uri])
assert result.exit_code == 0
# Now we have a dataset.
dataset = DataSet.from_uri(dataset_uri)
# Manifest has been updated.
assert len(dataset.identifiers) == 1
|
Fix the freeze functional test
|
Fix the freeze functional test
|
Python
|
mit
|
jic-dtool/dtool-create
|
"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import freeze
runner = CliRunner()
# Create an empty dataset
dataset_name = "my_dataset"
dataset = DataSet(dataset_name, data_directory="data")
dataset.persist_to_path(".")
# Add some files to it.
dest_dir = os.path.join(".", dataset.data_directory, "sample_files")
shutil.copytree(SAMPLE_FILES_DIR, dest_dir)
# At this point the manifest has not been updated.
assert len(dataset.identifiers) == 0
result = runner.invoke(freeze, ["."])
assert result.exit_code == 0
# Manifest has been updated.
assert len(dataset.identifiers) == 2
Fix the freeze functional test
|
"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet, ProtoDataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import create, freeze
runner = CliRunner()
dataset_name = "my_dataset"
result = runner.invoke(create, [dataset_name])
assert result.exit_code == 0
# At this point we have a proto dataset
dataset_abspath = os.path.abspath(dataset_name)
dataset_uri = "disk:{}".format(dataset_abspath)
dataset = ProtoDataSet.from_uri(dataset_uri)
# Add a file to the proto dataset.
sample_file_abspath = os.path.join(dataset_abspath, "data", "hello.txt")
with open(sample_file_abspath, "w") as fh:
fh.write("hello world")
result = runner.invoke(freeze, [dataset_uri])
assert result.exit_code == 0
# Now we have a dataset.
dataset = DataSet.from_uri(dataset_uri)
# Manifest has been updated.
assert len(dataset.identifiers) == 1
|
<commit_before>"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import freeze
runner = CliRunner()
# Create an empty dataset
dataset_name = "my_dataset"
dataset = DataSet(dataset_name, data_directory="data")
dataset.persist_to_path(".")
# Add some files to it.
dest_dir = os.path.join(".", dataset.data_directory, "sample_files")
shutil.copytree(SAMPLE_FILES_DIR, dest_dir)
# At this point the manifest has not been updated.
assert len(dataset.identifiers) == 0
result = runner.invoke(freeze, ["."])
assert result.exit_code == 0
# Manifest has been updated.
assert len(dataset.identifiers) == 2
<commit_msg>Fix the freeze functional test<commit_after>
|
"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet, ProtoDataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import create, freeze
runner = CliRunner()
dataset_name = "my_dataset"
result = runner.invoke(create, [dataset_name])
assert result.exit_code == 0
# At this point we have a proto dataset
dataset_abspath = os.path.abspath(dataset_name)
dataset_uri = "disk:{}".format(dataset_abspath)
dataset = ProtoDataSet.from_uri(dataset_uri)
# Add a file to the proto dataset.
sample_file_abspath = os.path.join(dataset_abspath, "data", "hello.txt")
with open(sample_file_abspath, "w") as fh:
fh.write("hello world")
result = runner.invoke(freeze, [dataset_uri])
assert result.exit_code == 0
# Now we have a dataset.
dataset = DataSet.from_uri(dataset_uri)
# Manifest has been updated.
assert len(dataset.identifiers) == 1
|
"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import freeze
runner = CliRunner()
# Create an empty dataset
dataset_name = "my_dataset"
dataset = DataSet(dataset_name, data_directory="data")
dataset.persist_to_path(".")
# Add some files to it.
dest_dir = os.path.join(".", dataset.data_directory, "sample_files")
shutil.copytree(SAMPLE_FILES_DIR, dest_dir)
# At this point the manifest has not been updated.
assert len(dataset.identifiers) == 0
result = runner.invoke(freeze, ["."])
assert result.exit_code == 0
# Manifest has been updated.
assert len(dataset.identifiers) == 2
Fix the freeze functional test"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet, ProtoDataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import create, freeze
runner = CliRunner()
dataset_name = "my_dataset"
result = runner.invoke(create, [dataset_name])
assert result.exit_code == 0
# At this point we have a proto dataset
dataset_abspath = os.path.abspath(dataset_name)
dataset_uri = "disk:{}".format(dataset_abspath)
dataset = ProtoDataSet.from_uri(dataset_uri)
# Add a file to the proto dataset.
sample_file_abspath = os.path.join(dataset_abspath, "data", "hello.txt")
with open(sample_file_abspath, "w") as fh:
fh.write("hello world")
result = runner.invoke(freeze, [dataset_uri])
assert result.exit_code == 0
# Now we have a dataset.
dataset = DataSet.from_uri(dataset_uri)
# Manifest has been updated.
assert len(dataset.identifiers) == 1
|
<commit_before>"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import freeze
runner = CliRunner()
# Create an empty dataset
dataset_name = "my_dataset"
dataset = DataSet(dataset_name, data_directory="data")
dataset.persist_to_path(".")
# Add some files to it.
dest_dir = os.path.join(".", dataset.data_directory, "sample_files")
shutil.copytree(SAMPLE_FILES_DIR, dest_dir)
# At this point the manifest has not been updated.
assert len(dataset.identifiers) == 0
result = runner.invoke(freeze, ["."])
assert result.exit_code == 0
# Manifest has been updated.
assert len(dataset.identifiers) == 2
<commit_msg>Fix the freeze functional test<commit_after>"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet, ProtoDataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import create, freeze
runner = CliRunner()
dataset_name = "my_dataset"
result = runner.invoke(create, [dataset_name])
assert result.exit_code == 0
# At this point we have a proto dataset
dataset_abspath = os.path.abspath(dataset_name)
dataset_uri = "disk:{}".format(dataset_abspath)
dataset = ProtoDataSet.from_uri(dataset_uri)
# Add a file to the proto dataset.
sample_file_abspath = os.path.join(dataset_abspath, "data", "hello.txt")
with open(sample_file_abspath, "w") as fh:
fh.write("hello world")
result = runner.invoke(freeze, [dataset_uri])
assert result.exit_code == 0
# Now we have a dataset.
dataset = DataSet.from_uri(dataset_uri)
# Manifest has been updated.
assert len(dataset.identifiers) == 1
|