commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
78b82b0c5e074c279288b9d53fe9cb5cfe1381ae
|
fabfile.py
|
fabfile.py
|
# -*- coding: utf-8 -*-
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
# -*- coding: utf-8 -*-
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
local('rm -rf test/unit/__pycache__')
local('rm -rf test/integration/__pycache__')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
Kill __pycache__ directories in tests
|
Kill __pycache__ directories in tests
|
Python
|
mit
|
smarter-travel-media/stac
|
# -*- coding: utf-8 -*-
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
Kill __pycache__ directories in tests
|
# -*- coding: utf-8 -*-
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
local('rm -rf test/unit/__pycache__')
local('rm -rf test/integration/__pycache__')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
<commit_before># -*- coding: utf-8 -*-
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
<commit_msg>Kill __pycache__ directories in tests<commit_after>
|
# -*- coding: utf-8 -*-
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
local('rm -rf test/unit/__pycache__')
local('rm -rf test/integration/__pycache__')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
# -*- coding: utf-8 -*-
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
Kill __pycache__ directories in tests# -*- coding: utf-8 -*-
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
local('rm -rf test/unit/__pycache__')
local('rm -rf test/integration/__pycache__')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
<commit_before># -*- coding: utf-8 -*-
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
<commit_msg>Kill __pycache__ directories in tests<commit_after># -*- coding: utf-8 -*-
from fabric.api import (
lcd,
local,
task)
@task
def clean():
local('rm -rf wheelhouse')
local('rm -rf dist')
local('rm -rf build')
local('rm -rf test/unit/__pycache__')
local('rm -rf test/integration/__pycache__')
with lcd('doc'):
local('make clean')
@task
def docs():
with lcd('doc'):
local('make html')
|
b1890ccd9946054cde25bbd511e317ec0b844b9a
|
webserver/hermes/models.py
|
webserver/hermes/models.py
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
def __str__(self):
return self.team.name
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
Add str method to TeamStats
|
Add str method to TeamStats
|
Python
|
bsd-3-clause
|
siggame/webserver,siggame/webserver,siggame/webserver
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
Add str method to TeamStats
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
def __str__(self):
return self.team.name
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
<commit_before>from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
<commit_msg>Add str method to TeamStats<commit_after>
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
def __str__(self):
return self.team.name
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
Add str method to TeamStatsfrom django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
def __str__(self):
return self.team.name
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
<commit_before>from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
<commit_msg>Add str method to TeamStats<commit_after>from django.db import models
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.conf import settings
from competition.models import Team
import json
class TeamStats(models.Model):
team = models.OneToOneField(Team)
data_field = models.TextField(null=True, default="null")
def __str__(self):
return self.team.name
@property
def data(self):
return json.loads(self.data_field)
@data.setter
def data(self, value):
self.data_field = json.dumps(value)
|
8cf555f2c8424cc8460228bac07940a19cf1a6a5
|
zinnia_akismet/__init__.py
|
zinnia_akismet/__init__.py
|
"""Spam checker backends for Zinnia based on Akismet"""
|
"""Spam checker backends for Zinnia based on Akismet"""
__version__ = '1.0.dev'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
__email__ = 'fantomas42@gmail.com'
__url__ = 'https://github.com/Fantomas42/zinnia-spam-checker-akismet'
|
Move package metadatas at the code level
|
Move package metadatas at the code level
|
Python
|
bsd-3-clause
|
django-blog-zinnia/zinnia-spam-checker-akismet
|
"""Spam checker backends for Zinnia based on Akismet"""
Move package metadatas at the code level
|
"""Spam checker backends for Zinnia based on Akismet"""
__version__ = '1.0.dev'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
__email__ = 'fantomas42@gmail.com'
__url__ = 'https://github.com/Fantomas42/zinnia-spam-checker-akismet'
|
<commit_before>"""Spam checker backends for Zinnia based on Akismet"""
<commit_msg>Move package metadatas at the code level<commit_after>
|
"""Spam checker backends for Zinnia based on Akismet"""
__version__ = '1.0.dev'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
__email__ = 'fantomas42@gmail.com'
__url__ = 'https://github.com/Fantomas42/zinnia-spam-checker-akismet'
|
"""Spam checker backends for Zinnia based on Akismet"""
Move package metadatas at the code level"""Spam checker backends for Zinnia based on Akismet"""
__version__ = '1.0.dev'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
__email__ = 'fantomas42@gmail.com'
__url__ = 'https://github.com/Fantomas42/zinnia-spam-checker-akismet'
|
<commit_before>"""Spam checker backends for Zinnia based on Akismet"""
<commit_msg>Move package metadatas at the code level<commit_after>"""Spam checker backends for Zinnia based on Akismet"""
__version__ = '1.0.dev'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
__email__ = 'fantomas42@gmail.com'
__url__ = 'https://github.com/Fantomas42/zinnia-spam-checker-akismet'
|
43b00bdb18131c49a6e52d752aeb0549298d8cda
|
avena/tests/test-image.py
|
avena/tests/test-image.py
|
#!/usr/bin/env python
from numpy import all, array, dstack
from .. import image
def test_get_channels():
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
for z in image.get_channels(y):
assert all(z == x)
def test_map_to_channels():
def f(x):
return x + 1
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
z = image.map_to_channels(
f,
lambda shape: shape,
y,
)
assert all(z == y + 1)
if __name__ == '__main__':
pass
|
#!/usr/bin/env python
from numpy import all, allclose, array, dstack
from os import remove
from os.path import sep, split
from .. import image, utils
def test_get_channels():
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
for z in image.get_channels(y):
assert all(z == x)
def test_map_to_channels():
def f(x):
return x + 1
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
z = image.map_to_channels(
f,
lambda shape: shape,
y,
)
assert all(z == y + 1)
def test_read_save():
f = split(__file__)[0] + sep + 'drink.png'
x = image.read(f)
tmp = utils.rand_filename(f)
try:
image.save(x, tmp)
y = image.read(tmp)
assert allclose(x, y, rtol=1e-4, atol=1e-1)
finally:
remove(tmp)
if __name__ == '__main__':
pass
|
Add more unit tests for the image module.
|
Add more unit tests for the image module.
|
Python
|
isc
|
eliteraspberries/avena
|
#!/usr/bin/env python
from numpy import all, array, dstack
from .. import image
def test_get_channels():
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
for z in image.get_channels(y):
assert all(z == x)
def test_map_to_channels():
def f(x):
return x + 1
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
z = image.map_to_channels(
f,
lambda shape: shape,
y,
)
assert all(z == y + 1)
if __name__ == '__main__':
pass
Add more unit tests for the image module.
|
#!/usr/bin/env python
from numpy import all, allclose, array, dstack
from os import remove
from os.path import sep, split
from .. import image, utils
def test_get_channels():
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
for z in image.get_channels(y):
assert all(z == x)
def test_map_to_channels():
def f(x):
return x + 1
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
z = image.map_to_channels(
f,
lambda shape: shape,
y,
)
assert all(z == y + 1)
def test_read_save():
f = split(__file__)[0] + sep + 'drink.png'
x = image.read(f)
tmp = utils.rand_filename(f)
try:
image.save(x, tmp)
y = image.read(tmp)
assert allclose(x, y, rtol=1e-4, atol=1e-1)
finally:
remove(tmp)
if __name__ == '__main__':
pass
|
<commit_before>#!/usr/bin/env python
from numpy import all, array, dstack
from .. import image
def test_get_channels():
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
for z in image.get_channels(y):
assert all(z == x)
def test_map_to_channels():
def f(x):
return x + 1
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
z = image.map_to_channels(
f,
lambda shape: shape,
y,
)
assert all(z == y + 1)
if __name__ == '__main__':
pass
<commit_msg>Add more unit tests for the image module.<commit_after>
|
#!/usr/bin/env python
from numpy import all, allclose, array, dstack
from os import remove
from os.path import sep, split
from .. import image, utils
def test_get_channels():
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
for z in image.get_channels(y):
assert all(z == x)
def test_map_to_channels():
def f(x):
return x + 1
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
z = image.map_to_channels(
f,
lambda shape: shape,
y,
)
assert all(z == y + 1)
def test_read_save():
f = split(__file__)[0] + sep + 'drink.png'
x = image.read(f)
tmp = utils.rand_filename(f)
try:
image.save(x, tmp)
y = image.read(tmp)
assert allclose(x, y, rtol=1e-4, atol=1e-1)
finally:
remove(tmp)
if __name__ == '__main__':
pass
|
#!/usr/bin/env python
from numpy import all, array, dstack
from .. import image
def test_get_channels():
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
for z in image.get_channels(y):
assert all(z == x)
def test_map_to_channels():
def f(x):
return x + 1
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
z = image.map_to_channels(
f,
lambda shape: shape,
y,
)
assert all(z == y + 1)
if __name__ == '__main__':
pass
Add more unit tests for the image module.#!/usr/bin/env python
from numpy import all, allclose, array, dstack
from os import remove
from os.path import sep, split
from .. import image, utils
def test_get_channels():
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
for z in image.get_channels(y):
assert all(z == x)
def test_map_to_channels():
def f(x):
return x + 1
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
z = image.map_to_channels(
f,
lambda shape: shape,
y,
)
assert all(z == y + 1)
def test_read_save():
f = split(__file__)[0] + sep + 'drink.png'
x = image.read(f)
tmp = utils.rand_filename(f)
try:
image.save(x, tmp)
y = image.read(tmp)
assert allclose(x, y, rtol=1e-4, atol=1e-1)
finally:
remove(tmp)
if __name__ == '__main__':
pass
|
<commit_before>#!/usr/bin/env python
from numpy import all, array, dstack
from .. import image
def test_get_channels():
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
for z in image.get_channels(y):
assert all(z == x)
def test_map_to_channels():
def f(x):
return x + 1
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
z = image.map_to_channels(
f,
lambda shape: shape,
y,
)
assert all(z == y + 1)
if __name__ == '__main__':
pass
<commit_msg>Add more unit tests for the image module.<commit_after>#!/usr/bin/env python
from numpy import all, allclose, array, dstack
from os import remove
from os.path import sep, split
from .. import image, utils
def test_get_channels():
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
for z in image.get_channels(y):
assert all(z == x)
def test_map_to_channels():
def f(x):
return x + 1
x = array([[1, 2, 3], [2, 3, 4], [3, 4, 5]])
y = dstack((x, x, x))
z = image.map_to_channels(
f,
lambda shape: shape,
y,
)
assert all(z == y + 1)
def test_read_save():
f = split(__file__)[0] + sep + 'drink.png'
x = image.read(f)
tmp = utils.rand_filename(f)
try:
image.save(x, tmp)
y = image.read(tmp)
assert allclose(x, y, rtol=1e-4, atol=1e-1)
finally:
remove(tmp)
if __name__ == '__main__':
pass
|
30674fb6e244373bb8b1ed74b7a38e5cc2ed19a7
|
ibmcnx/doc/Documentation.py
|
ibmcnx/doc/Documentation.py
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import ibmcnx.filehandle
import sys
sys.stdout = open("/tmp/documentation.txt", "w")
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import ibmcnx.filehandle
import sys
emp1 = ibmcnx.filehandle.Ibmcnxfile()
sys.stdout = emp1
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
Create script to save documentation to a file
|
4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import ibmcnx.filehandle
import sys
sys.stdout = open("/tmp/documentation.txt", "w")
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import ibmcnx.filehandle
import sys
emp1 = ibmcnx.filehandle.Ibmcnxfile()
sys.stdout = emp1
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
<commit_before>######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import ibmcnx.filehandle
import sys
sys.stdout = open("/tmp/documentation.txt", "w")
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )<commit_msg>4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after>
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import ibmcnx.filehandle
import sys
emp1 = ibmcnx.filehandle.Ibmcnxfile()
sys.stdout = emp1
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import ibmcnx.filehandle
import sys
sys.stdout = open("/tmp/documentation.txt", "w")
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import ibmcnx.filehandle
import sys
emp1 = ibmcnx.filehandle.Ibmcnxfile()
sys.stdout = emp1
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
<commit_before>######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import ibmcnx.filehandle
import sys
sys.stdout = open("/tmp/documentation.txt", "w")
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )<commit_msg>4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after>######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import ibmcnx.filehandle
import sys
emp1 = ibmcnx.filehandle.Ibmcnxfile()
sys.stdout = emp1
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
9236ac7c0893cc1c4cf19755683d4ab38590de7d
|
spym-as.py
|
spym-as.py
|
#!/usr/bin/env python2.7
import argparse
from util.assemble import assemble
from util.hexdump import hexdump
def get_args():
parser = argparse.ArgumentParser(description='Spym MIPS Assembler. Generates "spym" format binaries.')
parser.add_argument('file', metavar='FILE', type=str,
help='MIPS source file')
return parser.parse_args()
def main():
args = get_args()
fname = args.file
if fname.endswith('.s'):
fname = fname[-2:] + '.spym'
else:
fname = fname + '.spym'
# with open(fname, 'w') as f:
hexdump(assemble(args.file))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python2.7
import argparse
from util.assemble import assemble
def get_args():
parser = argparse.ArgumentParser(description='Spym MIPS Assembler. Generates "spym" format binaries.')
parser.add_argument('file', metavar='FILE', type=str,
help='MIPS source file')
parser.add_argument('-o', '--output', type=str,
help='File to write output to. Default is a.out',
default='a.out')
return parser.parse_args()
def main():
args = get_args()
with open(args.output, 'w') as f:
f.write(assemble(args.file))
if __name__ == '__main__':
main()
|
Write to file instead of hexdump
|
Write to file instead of hexdump
|
Python
|
mit
|
mossberg/spym,mossberg/spym
|
#!/usr/bin/env python2.7
import argparse
from util.assemble import assemble
from util.hexdump import hexdump
def get_args():
parser = argparse.ArgumentParser(description='Spym MIPS Assembler. Generates "spym" format binaries.')
parser.add_argument('file', metavar='FILE', type=str,
help='MIPS source file')
return parser.parse_args()
def main():
args = get_args()
fname = args.file
if fname.endswith('.s'):
fname = fname[-2:] + '.spym'
else:
fname = fname + '.spym'
# with open(fname, 'w') as f:
hexdump(assemble(args.file))
if __name__ == '__main__':
main()
Write to file instead of hexdump
|
#!/usr/bin/env python2.7
import argparse
from util.assemble import assemble
def get_args():
parser = argparse.ArgumentParser(description='Spym MIPS Assembler. Generates "spym" format binaries.')
parser.add_argument('file', metavar='FILE', type=str,
help='MIPS source file')
parser.add_argument('-o', '--output', type=str,
help='File to write output to. Default is a.out',
default='a.out')
return parser.parse_args()
def main():
args = get_args()
with open(args.output, 'w') as f:
f.write(assemble(args.file))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python2.7
import argparse
from util.assemble import assemble
from util.hexdump import hexdump
def get_args():
parser = argparse.ArgumentParser(description='Spym MIPS Assembler. Generates "spym" format binaries.')
parser.add_argument('file', metavar='FILE', type=str,
help='MIPS source file')
return parser.parse_args()
def main():
args = get_args()
fname = args.file
if fname.endswith('.s'):
fname = fname[-2:] + '.spym'
else:
fname = fname + '.spym'
# with open(fname, 'w') as f:
hexdump(assemble(args.file))
if __name__ == '__main__':
main()
<commit_msg>Write to file instead of hexdump<commit_after>
|
#!/usr/bin/env python2.7
import argparse
from util.assemble import assemble
def get_args():
parser = argparse.ArgumentParser(description='Spym MIPS Assembler. Generates "spym" format binaries.')
parser.add_argument('file', metavar='FILE', type=str,
help='MIPS source file')
parser.add_argument('-o', '--output', type=str,
help='File to write output to. Default is a.out',
default='a.out')
return parser.parse_args()
def main():
args = get_args()
with open(args.output, 'w') as f:
f.write(assemble(args.file))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python2.7
import argparse
from util.assemble import assemble
from util.hexdump import hexdump
def get_args():
parser = argparse.ArgumentParser(description='Spym MIPS Assembler. Generates "spym" format binaries.')
parser.add_argument('file', metavar='FILE', type=str,
help='MIPS source file')
return parser.parse_args()
def main():
args = get_args()
fname = args.file
if fname.endswith('.s'):
fname = fname[-2:] + '.spym'
else:
fname = fname + '.spym'
# with open(fname, 'w') as f:
hexdump(assemble(args.file))
if __name__ == '__main__':
main()
Write to file instead of hexdump#!/usr/bin/env python2.7
import argparse
from util.assemble import assemble
def get_args():
parser = argparse.ArgumentParser(description='Spym MIPS Assembler. Generates "spym" format binaries.')
parser.add_argument('file', metavar='FILE', type=str,
help='MIPS source file')
parser.add_argument('-o', '--output', type=str,
help='File to write output to. Default is a.out',
default='a.out')
return parser.parse_args()
def main():
args = get_args()
with open(args.output, 'w') as f:
f.write(assemble(args.file))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python2.7
import argparse
from util.assemble import assemble
from util.hexdump import hexdump
def get_args():
parser = argparse.ArgumentParser(description='Spym MIPS Assembler. Generates "spym" format binaries.')
parser.add_argument('file', metavar='FILE', type=str,
help='MIPS source file')
return parser.parse_args()
def main():
args = get_args()
fname = args.file
if fname.endswith('.s'):
fname = fname[-2:] + '.spym'
else:
fname = fname + '.spym'
# with open(fname, 'w') as f:
hexdump(assemble(args.file))
if __name__ == '__main__':
main()
<commit_msg>Write to file instead of hexdump<commit_after>#!/usr/bin/env python2.7
import argparse
from util.assemble import assemble
def get_args():
parser = argparse.ArgumentParser(description='Spym MIPS Assembler. Generates "spym" format binaries.')
parser.add_argument('file', metavar='FILE', type=str,
help='MIPS source file')
parser.add_argument('-o', '--output', type=str,
help='File to write output to. Default is a.out',
default='a.out')
return parser.parse_args()
def main():
args = get_args()
with open(args.output, 'w') as f:
f.write(assemble(args.file))
if __name__ == '__main__':
main()
|
de06a96af887b2bbfa6589b2881606c29000398e
|
cabot/cabotapp/jenkins.py
|
cabot/cabotapp/jenkins.py
|
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
    """Return a status summary dict for the Jenkins job *jobname*.

    Queries the Jenkins JSON API and reports:
      active / succeeded  -- derived from the job's 'color' field
      job_number          -- number of the last build, if any
      blocked_build_time  -- seconds the queued build has been blocked,
                             or None when nothing is blocked
      status_code         -- HTTP status of the API response
    """
    ret = {
        'active': True,
        'succeeded': False,
        'blocked_build_time': None,
        'status_code': 200
    }
    endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
    resp = requests.get(endpoint, auth=auth, verify=True)
    status = resp.json()
    ret['status_code'] = resp.status_code
    ret['job_number'] = status['lastBuild'].get('number', None)
    # Jenkins encodes build state as a ball "color": a 'blue'-prefixed
    # color means the last build succeeded; 'disabled' means the job
    # is switched off. Any other color leaves the defaults in place.
    if status['color'].startswith('blue'):
        ret['active'] = True
        ret['succeeded'] = True
    elif status['color'] == 'disabled':
        ret['active'] = False
        ret['succeeded'] = False
    # 'inQueueSince' is a millisecond epoch timestamp.
    if status['queueItem'] and status['queueItem']['blocked']:
        time_blocked_since = datetime.utcfromtimestamp(
            float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
        ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
    return ret
|
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
    """Query the Jenkins/Hudson JSON API and summarize *jobname*'s state."""
    result = {
        'active': True,
        'succeeded': False,
        'blocked_build_time': None,
        'status_code': 200
    }
    url = settings.JENKINS_API + 'job/%s/api/json' % jobname
    response = requests.get(url, auth=auth, verify=True)
    payload = response.json()
    result['status_code'] = response.status_code
    result['job_number'] = payload['lastBuild'].get('number', None)
    color = payload['color']
    # "blue" marks a successful last build on Jenkins; Hudson uses "green".
    if color.startswith('blue') or color.startswith('green'):
        result['active'] = True
        result['succeeded'] = True
    elif color == 'disabled':
        result['active'] = False
        result['succeeded'] = False
    queue_item = payload['queueItem']
    if queue_item and queue_item['blocked']:
        # 'inQueueSince' is a millisecond epoch timestamp.
        blocked_since = datetime.utcfromtimestamp(
            float(queue_item['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
        result['blocked_build_time'] = (timezone.now() - blocked_since).total_seconds()
    return result
|
Add check for "green" status for Hudson
|
Add check for "green" status for Hudson
Hudson and Jenkins have extremely similar APIs, one of the main differences is that Jenkins uses "blue" but Hudson uses "green" for a successful last build. Adding the option to check for "green" status allows the Jenkins checks to also work for checking Hudson jobs.
Jenkins does not use green as a color: https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/BallColor.java
|
Python
|
mit
|
arachnys/cabot,cmclaughlin/cabot,cmclaughlin/cabot,arachnys/cabot,maks-us/cabot,cmclaughlin/cabot,bonniejools/cabot,arachnys/cabot,bonniejools/cabot,bonniejools/cabot,bonniejools/cabot,maks-us/cabot,arachnys/cabot,maks-us/cabot,maks-us/cabot,cmclaughlin/cabot
|
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json()
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
Add check for "green" status for Hudson
Hudson and Jenkins have extremely similar APIs, one of the main differences is that Jenkins uses "blue" but Hudson uses "green" for a successful last build. Adding the option to check for "green" status allows the Jenkins checks to also work for checking Hudson jobs.
Jenkins does not use green as a color: https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/BallColor.java
|
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json()
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue') or status['color'].startswith('green'): # Jenkins uses "blue" for successful; Hudson uses "green"
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
|
<commit_before>from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json()
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
<commit_msg>Add check for "green" status for Hudson
Hudson and Jenkins have extremely similar APIs, one of the main differences is that Jenkins uses "blue" but Hudson uses "green" for a successful last build. Adding the option to check for "green" status allows the Jenkins checks to also work for checking Hudson jobs.
Jenkins does not use green as a color: https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/BallColor.java<commit_after>
|
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json()
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue') or status['color'].startswith('green'): # Jenkins uses "blue" for successful; Hudson uses "green"
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
|
from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json()
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
Add check for "green" status for Hudson
Hudson and Jenkins have extremely similar APIs, one of the main differences is that Jenkins uses "blue" but Hudson uses "green" for a successful last build. Adding the option to check for "green" status allows the Jenkins checks to also work for checking Hudson jobs.
Jenkins does not use green as a color: https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/BallColor.javafrom os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json()
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue') or status['color'].startswith('green'): # Jenkins uses "blue" for successful; Hudson uses "green"
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
|
<commit_before>from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json()
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue'):
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
<commit_msg>Add check for "green" status for Hudson
Hudson and Jenkins have extremely similar APIs, one of the main differences is that Jenkins uses "blue" but Hudson uses "green" for a successful last build. Adding the option to check for "green" status allows the Jenkins checks to also work for checking Hudson jobs.
Jenkins does not use green as a color: https://github.com/jenkinsci/jenkins/blob/master/core/src/main/java/hudson/model/BallColor.java<commit_after>from os import environ as env
from django.conf import settings
import requests
from datetime import datetime
from django.utils import timezone
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
auth = (settings.JENKINS_USER, settings.JENKINS_PASS)
def get_job_status(jobname):
ret = {
'active': True,
'succeeded': False,
'blocked_build_time': None,
'status_code': 200
}
endpoint = settings.JENKINS_API + 'job/%s/api/json' % jobname
resp = requests.get(endpoint, auth=auth, verify=True)
status = resp.json()
ret['status_code'] = resp.status_code
ret['job_number'] = status['lastBuild'].get('number', None)
if status['color'].startswith('blue') or status['color'].startswith('green'): # Jenkins uses "blue" for successful; Hudson uses "green"
ret['active'] = True
ret['succeeded'] = True
elif status['color'] == 'disabled':
ret['active'] = False
ret['succeeded'] = False
if status['queueItem'] and status['queueItem']['blocked']:
time_blocked_since = datetime.utcfromtimestamp(
float(status['queueItem']['inQueueSince']) / 1000).replace(tzinfo=timezone.utc)
ret['blocked_build_time'] = (timezone.now() - time_blocked_since).total_seconds()
return ret
|
29564aca770969c0ff75413f059e5db7d33a69a7
|
blog/utils.py
|
blog/utils.py
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
    """Mixin that locates a Post via 'year', 'month', and 'slug' URL kwargs."""

    errors = {
        'url_kwargs':
            "Generic view {} must be called with "
            "year, month, and slug.",
    }

    def get_object(self, queryset=None):
        """Return the Post matching the URL kwargs, or raise.

        Raises AttributeError when any of the three kwargs is missing.
        """
        lookup = {name: self.kwargs.get(name)
                  for name in ('year', 'month', 'slug')}
        if any(value is None for value in lookup.values()):
            raise AttributeError(
                self.errors['url_kwargs'].format(
                    self.__class__.__name__))
        return get_object_or_404(
            Post,
            pub_date__year=lookup['year'],
            pub_date__month=lookup['month'],
            slug__iexact=lookup['slug'])
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
    """Mixin that locates a Post via configurable date/slug URL kwargs.

    The year/month URL kwarg names are configurable via class
    attributes below. The slug kwarg name is read from
    ``self.slug_url_kwarg``, which is presumably supplied by Django's
    SingleObjectMixin (default 'slug') -- TODO confirm against the
    views that use this mixin.
    """
    # Names of the URL keyword arguments holding the publication date parts.
    month_url_kwarg = 'month'
    year_url_kwarg = 'year'
    errors = {
        'url_kwargs':
            "Generic view {} must be called with "
            "year, month, and slug.",
    }

    def get_object(self, queryset=None):
        """Return the Post matching the URL kwargs, or raise Http404.

        Raises AttributeError when any of the year/month/slug URL
        kwargs is missing from the view's URL configuration.
        """
        year = self.kwargs.get(
            self.year_url_kwarg)
        month = self.kwargs.get(
            self.month_url_kwarg)
        slug = self.kwargs.get(
            self.slug_url_kwarg)
        if (year is None
                or month is None
                or slug is None):
            raise AttributeError(
                self.errors['url_kwargs'].format(
                    self.__class__.__name__))
        # Case-insensitive slug match scoped to the publication year/month.
        return get_object_or_404(
            Post,
            pub_date__year=year,
            pub_date__month=month,
            slug__iexact=slug)
|
Add date URL kwarg options to PostGetMixin.
|
Ch18: Add date URL kwarg options to PostGetMixin.
|
Python
|
bsd-2-clause
|
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get('year')
month = self.kwargs.get('month')
slug = self.kwargs.get('slug')
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
return get_object_or_404(
Post,
pub_date__year=year,
pub_date__month=month,
slug__iexact=slug)
Ch18: Add date URL kwarg options to PostGetMixin.
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
return get_object_or_404(
Post,
pub_date__year=year,
pub_date__month=month,
slug__iexact=slug)
|
<commit_before>from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get('year')
month = self.kwargs.get('month')
slug = self.kwargs.get('slug')
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
return get_object_or_404(
Post,
pub_date__year=year,
pub_date__month=month,
slug__iexact=slug)
<commit_msg>Ch18: Add date URL kwarg options to PostGetMixin.<commit_after>
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
return get_object_or_404(
Post,
pub_date__year=year,
pub_date__month=month,
slug__iexact=slug)
|
from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get('year')
month = self.kwargs.get('month')
slug = self.kwargs.get('slug')
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
return get_object_or_404(
Post,
pub_date__year=year,
pub_date__month=month,
slug__iexact=slug)
Ch18: Add date URL kwarg options to PostGetMixin.from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
return get_object_or_404(
Post,
pub_date__year=year,
pub_date__month=month,
slug__iexact=slug)
|
<commit_before>from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get('year')
month = self.kwargs.get('month')
slug = self.kwargs.get('slug')
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
return get_object_or_404(
Post,
pub_date__year=year,
pub_date__month=month,
slug__iexact=slug)
<commit_msg>Ch18: Add date URL kwarg options to PostGetMixin.<commit_after>from django.shortcuts import get_object_or_404
from .models import Post
class PostGetMixin:
month_url_kwarg = 'month'
year_url_kwarg = 'year'
errors = {
'url_kwargs':
"Generic view {} must be called with "
"year, month, and slug.",
}
def get_object(self, queryset=None):
year = self.kwargs.get(
self.year_url_kwarg)
month = self.kwargs.get(
self.month_url_kwarg)
slug = self.kwargs.get(
self.slug_url_kwarg)
if (year is None
or month is None
or slug is None):
raise AttributeError(
self.errors['url_kwargs'].format(
self.__class__.__name__))
return get_object_or_404(
Post,
pub_date__year=year,
pub_date__month=month,
slug__iexact=slug)
|
601b3d7db3bedd090291f1a52f22f6daee9987fd
|
imapbox.py
|
imapbox.py
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# import mailboxresource
from mailboxresource import MailboxClient
import argparse
def main():
    """Dump one IMAP account's folder into .eml files.

    Connection details come entirely from the command line; the
    project-local MailboxClient performs the actual download and
    cleanup of the connection.
    """
    argparser = argparse.ArgumentParser(description="Dump a IMAP folder into .eml files")
    argparser.add_argument('-s', dest='host', help="IMAP host, like imap.gmail.com", required=True)
    argparser.add_argument('-u', dest='username', help="IMAP username", required=True)
    argparser.add_argument('-p', dest='password', help="IMAP password", required=True)
    argparser.add_argument('-r', dest='remote_folder', help="Remote folder to download", default='INBOX')
    argparser.add_argument('-l', dest='local_folder', help="Local folder where to create the email folders", default='.')
    args = argparser.parse_args()
    mailbox = MailboxClient(args.host, args.username, args.password, args.remote_folder)
    mailbox.copy_emails(args.local_folder)
    mailbox.cleanup()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# import mailboxresource
from mailboxresource import MailboxClient
import argparse
import ConfigParser, os
def load_configuration(args):
    """Build the imapbox options dict from config files and CLI args.

    Reads /etc/imapbox/config.cfg and ~/.config/imapbox/config.cfg.
    Every section other than [imapbox] describes one IMAP account;
    accounts missing host/username/password are skipped. A CLI
    local_folder, when given, overrides the configured one.
    """
    config = ConfigParser.ConfigParser(allow_no_value=True)
    config.read(['/etc/imapbox/config.cfg', os.path.expanduser('~/.config/imapbox/config.cfg')])
    options = {
        'local_folder': '/var/imapbox',
        'accounts': []
    }
    if config.has_section('imapbox'):
        options['local_folder'] = config.get('imapbox', 'local_folder')
    for section in config.sections():
        if 'imapbox' == section:
            continue
        account = {
            'name': section,
            'remote_folder': 'INBOX'
        }
        # BUG FIX: config.get() raises NoOptionError when the option is
        # absent, so the None checks below never fired for a missing
        # option -- the old code crashed instead of skipping the account.
        # Guard each lookup with has_option().
        for key in ('host', 'username', 'password'):
            account[key] = config.get(section, key) if config.has_option(section, key) else None
        if config.has_option(section, 'remote_folder'):
            account['remote_folder'] = config.get(section, 'remote_folder')
        if account['host'] is None or account['username'] is None or account['password'] is None:
            continue
        options['accounts'].append(account)
    if args.local_folder:
        options['local_folder'] = args.local_folder
    return options
def main():
    """Download every configured account's mailbox into local folders."""
    cli = argparse.ArgumentParser(description="Dump a IMAP folder into .eml files")
    cli.add_argument('-l', dest='local_folder', default='.',
                     help="Local folder where to create the email folders")
    options = load_configuration(cli.parse_args())
    for account in options['accounts']:
        client = MailboxClient(account['host'], account['username'],
                               account['password'], account['remote_folder'])
        client.copy_emails(options['local_folder'])
        client.cleanup()
if __name__ == '__main__':
main()
|
Load multiple accounts using a config file
|
Load multiple accounts using a config file
|
Python
|
mit
|
polo2ro/imapbox
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# import mailboxresource
from mailboxresource import MailboxClient
import argparse
def main():
argparser = argparse.ArgumentParser(description="Dump a IMAP folder into .eml files")
argparser.add_argument('-s', dest='host', help="IMAP host, like imap.gmail.com", required=True)
argparser.add_argument('-u', dest='username', help="IMAP username", required=True)
argparser.add_argument('-p', dest='password', help="IMAP password", required=True)
argparser.add_argument('-r', dest='remote_folder', help="Remote folder to download", default='INBOX')
argparser.add_argument('-l', dest='local_folder', help="Local folder where to create the email folders", default='.')
args = argparser.parse_args()
mailbox = MailboxClient(args.host, args.username, args.password, args.remote_folder)
mailbox.copy_emails(args.local_folder)
mailbox.cleanup()
if __name__ == '__main__':
main()
Load multiple accounts using a config file
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# import mailboxresource
from mailboxresource import MailboxClient
import argparse
import ConfigParser, os
def load_configuration(args):
config = ConfigParser.ConfigParser(allow_no_value=True)
config.read(['/etc/imapbox/config.cfg', os.path.expanduser('~/.config/imapbox/config.cfg')])
options = {
'local_folder': '/var/imapbox',
'accounts': []
}
if (config.has_section('imapbox')):
options['local_folder'] = config.get('imapbox', 'local_folder')
for section in config.sections():
if ('imapbox' == section):
continue
account = {
'name': section,
'remote_folder': 'INBOX'
}
account['host'] = config.get(section, 'host')
account['username'] = config.get(section, 'username')
account['password'] = config.get(section, 'password')
if (config.has_option(section, 'remote_folder')):
account['remote_folder'] = config.get(section, 'remote_folder')
if (None == account['host'] or None == account['username'] or None == account['password']):
continue
options['accounts'].append(account)
if (args.local_folder):
options['local_folder'] = args.local_folder
return options
def main():
argparser = argparse.ArgumentParser(description="Dump a IMAP folder into .eml files")
argparser.add_argument('-l', dest='local_folder', help="Local folder where to create the email folders", default='.')
args = argparser.parse_args()
options = load_configuration(args)
for account in options['accounts']:
mailbox = MailboxClient(account['host'], account['username'], account['password'], account['remote_folder'])
mailbox.copy_emails(options['local_folder'])
mailbox.cleanup()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
#-*- coding:utf-8 -*-
# import mailboxresource
from mailboxresource import MailboxClient
import argparse
def main():
argparser = argparse.ArgumentParser(description="Dump a IMAP folder into .eml files")
argparser.add_argument('-s', dest='host', help="IMAP host, like imap.gmail.com", required=True)
argparser.add_argument('-u', dest='username', help="IMAP username", required=True)
argparser.add_argument('-p', dest='password', help="IMAP password", required=True)
argparser.add_argument('-r', dest='remote_folder', help="Remote folder to download", default='INBOX')
argparser.add_argument('-l', dest='local_folder', help="Local folder where to create the email folders", default='.')
args = argparser.parse_args()
mailbox = MailboxClient(args.host, args.username, args.password, args.remote_folder)
mailbox.copy_emails(args.local_folder)
mailbox.cleanup()
if __name__ == '__main__':
main()
<commit_msg>Load multiple accounts using a config file<commit_after>
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# import mailboxresource
from mailboxresource import MailboxClient
import argparse
import ConfigParser, os
def load_configuration(args):
config = ConfigParser.ConfigParser(allow_no_value=True)
config.read(['/etc/imapbox/config.cfg', os.path.expanduser('~/.config/imapbox/config.cfg')])
options = {
'local_folder': '/var/imapbox',
'accounts': []
}
if (config.has_section('imapbox')):
options['local_folder'] = config.get('imapbox', 'local_folder')
for section in config.sections():
if ('imapbox' == section):
continue
account = {
'name': section,
'remote_folder': 'INBOX'
}
account['host'] = config.get(section, 'host')
account['username'] = config.get(section, 'username')
account['password'] = config.get(section, 'password')
if (config.has_option(section, 'remote_folder')):
account['remote_folder'] = config.get(section, 'remote_folder')
if (None == account['host'] or None == account['username'] or None == account['password']):
continue
options['accounts'].append(account)
if (args.local_folder):
options['local_folder'] = args.local_folder
return options
def main():
argparser = argparse.ArgumentParser(description="Dump a IMAP folder into .eml files")
argparser.add_argument('-l', dest='local_folder', help="Local folder where to create the email folders", default='.')
args = argparser.parse_args()
options = load_configuration(args)
for account in options['accounts']:
mailbox = MailboxClient(account['host'], account['username'], account['password'], account['remote_folder'])
mailbox.copy_emails(options['local_folder'])
mailbox.cleanup()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# import mailboxresource
from mailboxresource import MailboxClient
import argparse
def main():
argparser = argparse.ArgumentParser(description="Dump a IMAP folder into .eml files")
argparser.add_argument('-s', dest='host', help="IMAP host, like imap.gmail.com", required=True)
argparser.add_argument('-u', dest='username', help="IMAP username", required=True)
argparser.add_argument('-p', dest='password', help="IMAP password", required=True)
argparser.add_argument('-r', dest='remote_folder', help="Remote folder to download", default='INBOX')
argparser.add_argument('-l', dest='local_folder', help="Local folder where to create the email folders", default='.')
args = argparser.parse_args()
mailbox = MailboxClient(args.host, args.username, args.password, args.remote_folder)
mailbox.copy_emails(args.local_folder)
mailbox.cleanup()
if __name__ == '__main__':
main()
Load multiple accounts using a config file#!/usr/bin/env python
#-*- coding:utf-8 -*-
# import mailboxresource
from mailboxresource import MailboxClient
import argparse
import ConfigParser, os
def load_configuration(args):
config = ConfigParser.ConfigParser(allow_no_value=True)
config.read(['/etc/imapbox/config.cfg', os.path.expanduser('~/.config/imapbox/config.cfg')])
options = {
'local_folder': '/var/imapbox',
'accounts': []
}
if (config.has_section('imapbox')):
options['local_folder'] = config.get('imapbox', 'local_folder')
for section in config.sections():
if ('imapbox' == section):
continue
account = {
'name': section,
'remote_folder': 'INBOX'
}
account['host'] = config.get(section, 'host')
account['username'] = config.get(section, 'username')
account['password'] = config.get(section, 'password')
if (config.has_option(section, 'remote_folder')):
account['remote_folder'] = config.get(section, 'remote_folder')
if (None == account['host'] or None == account['username'] or None == account['password']):
continue
options['accounts'].append(account)
if (args.local_folder):
options['local_folder'] = args.local_folder
return options
def main():
argparser = argparse.ArgumentParser(description="Dump a IMAP folder into .eml files")
argparser.add_argument('-l', dest='local_folder', help="Local folder where to create the email folders", default='.')
args = argparser.parse_args()
options = load_configuration(args)
for account in options['accounts']:
mailbox = MailboxClient(account['host'], account['username'], account['password'], account['remote_folder'])
mailbox.copy_emails(options['local_folder'])
mailbox.cleanup()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
#-*- coding:utf-8 -*-
# import mailboxresource
from mailboxresource import MailboxClient
import argparse
def main():
argparser = argparse.ArgumentParser(description="Dump a IMAP folder into .eml files")
argparser.add_argument('-s', dest='host', help="IMAP host, like imap.gmail.com", required=True)
argparser.add_argument('-u', dest='username', help="IMAP username", required=True)
argparser.add_argument('-p', dest='password', help="IMAP password", required=True)
argparser.add_argument('-r', dest='remote_folder', help="Remote folder to download", default='INBOX')
argparser.add_argument('-l', dest='local_folder', help="Local folder where to create the email folders", default='.')
args = argparser.parse_args()
mailbox = MailboxClient(args.host, args.username, args.password, args.remote_folder)
mailbox.copy_emails(args.local_folder)
mailbox.cleanup()
if __name__ == '__main__':
main()
<commit_msg>Load multiple accounts using a config file<commit_after>#!/usr/bin/env python
#-*- coding:utf-8 -*-
# import mailboxresource
from mailboxresource import MailboxClient
import argparse
import ConfigParser, os
def load_configuration(args):
config = ConfigParser.ConfigParser(allow_no_value=True)
config.read(['/etc/imapbox/config.cfg', os.path.expanduser('~/.config/imapbox/config.cfg')])
options = {
'local_folder': '/var/imapbox',
'accounts': []
}
if (config.has_section('imapbox')):
options['local_folder'] = config.get('imapbox', 'local_folder')
for section in config.sections():
if ('imapbox' == section):
continue
account = {
'name': section,
'remote_folder': 'INBOX'
}
account['host'] = config.get(section, 'host')
account['username'] = config.get(section, 'username')
account['password'] = config.get(section, 'password')
if (config.has_option(section, 'remote_folder')):
account['remote_folder'] = config.get(section, 'remote_folder')
if (None == account['host'] or None == account['username'] or None == account['password']):
continue
options['accounts'].append(account)
if (args.local_folder):
options['local_folder'] = args.local_folder
return options
def main():
argparser = argparse.ArgumentParser(description="Dump a IMAP folder into .eml files")
argparser.add_argument('-l', dest='local_folder', help="Local folder where to create the email folders", default='.')
args = argparser.parse_args()
options = load_configuration(args)
for account in options['accounts']:
mailbox = MailboxClient(account['host'], account['username'], account['password'], account['remote_folder'])
mailbox.copy_emails(options['local_folder'])
mailbox.cleanup()
if __name__ == '__main__':
main()
|
80d5fc4ec2711dde9bd7cf775e9376190c46f9f7
|
aeromancer/project_filter.py
|
aeromancer/project_filter.py
|
import argparse
from aeromancer.db.models import Project
class ProjectFilter(object):
"""Manage the arguments for filtering queries by project.
"""
@staticmethod
def add_arguments(parser):
"""Given an argparse.ArgumentParser add arguments.
"""
grp = parser.add_argument_group('Project Filter')
grp.add_argument(
'--project',
action='append',
default=[],
dest='projects',
help='projects to limit search, by exact name',
)
@classmethod
def from_parsed_args(cls, parsed_args):
return cls(projects=parsed_args.projects)
def __init__(self, projects):
self.projects = projects
def update_query(self, query):
if self.projects:
query = query.filter(
Project.name.in_(self.projects)
)
return query
|
import argparse
import logging
from aeromancer.db.models import Project
LOG = logging.getLogger(__name__)
class ProjectFilter(object):
"""Manage the arguments for filtering queries by project.
"""
@staticmethod
def add_arguments(parser):
"""Given an argparse.ArgumentParser add arguments.
"""
grp = parser.add_argument_group('Project Filter')
grp.add_argument(
'--project',
action='append',
default=[],
dest='projects',
help=('projects to limit search, '
'by exact name or glob-style patterns'),
)
@classmethod
def from_parsed_args(cls, parsed_args):
return cls(projects=parsed_args.projects)
def __init__(self, projects):
self.exact = []
self.patterns = []
for p in projects:
if '*' in p:
self.patterns.append(p.replace('*', '%'))
else:
self.exact.append(p)
self.projects = projects
def update_query(self, query):
the_filter = ()
if self.exact:
LOG.debug('filtering on projects in %s', self.exact)
the_filter += (Project.name.in_(self.exact),)
if self.patterns:
LOG.debug('filtering on projects matching %s', self.patterns)
the_filter += tuple(Project.name.ilike(p)
for p in self.patterns)
if the_filter:
query = query.filter(*the_filter)
return query
|
Support project filter with wildcard
|
Support project filter with wildcard
Translate glob-style wildcards to ILIKE comparisons for the database.
|
Python
|
apache-2.0
|
openstack/aeromancer,stackforge/aeromancer,dhellmann/aeromancer
|
import argparse
from aeromancer.db.models import Project
class ProjectFilter(object):
"""Manage the arguments for filtering queries by project.
"""
@staticmethod
def add_arguments(parser):
"""Given an argparse.ArgumentParser add arguments.
"""
grp = parser.add_argument_group('Project Filter')
grp.add_argument(
'--project',
action='append',
default=[],
dest='projects',
help='projects to limit search, by exact name',
)
@classmethod
def from_parsed_args(cls, parsed_args):
return cls(projects=parsed_args.projects)
def __init__(self, projects):
self.projects = projects
def update_query(self, query):
if self.projects:
query = query.filter(
Project.name.in_(self.projects)
)
return query
Support project filter with wildcard
Translate glob-style wildcards to ILIKE comparisons for the database.
|
import argparse
import logging
from aeromancer.db.models import Project
LOG = logging.getLogger(__name__)
class ProjectFilter(object):
"""Manage the arguments for filtering queries by project.
"""
@staticmethod
def add_arguments(parser):
"""Given an argparse.ArgumentParser add arguments.
"""
grp = parser.add_argument_group('Project Filter')
grp.add_argument(
'--project',
action='append',
default=[],
dest='projects',
help=('projects to limit search, '
'by exact name or glob-style patterns'),
)
@classmethod
def from_parsed_args(cls, parsed_args):
return cls(projects=parsed_args.projects)
def __init__(self, projects):
self.exact = []
self.patterns = []
for p in projects:
if '*' in p:
self.patterns.append(p.replace('*', '%'))
else:
self.exact.append(p)
self.projects = projects
def update_query(self, query):
the_filter = ()
if self.exact:
LOG.debug('filtering on projects in %s', self.exact)
the_filter += (Project.name.in_(self.exact),)
if self.patterns:
LOG.debug('filtering on projects matching %s', self.patterns)
the_filter += tuple(Project.name.ilike(p)
for p in self.patterns)
if the_filter:
query = query.filter(*the_filter)
return query
|
<commit_before>import argparse
from aeromancer.db.models import Project
class ProjectFilter(object):
"""Manage the arguments for filtering queries by project.
"""
@staticmethod
def add_arguments(parser):
"""Given an argparse.ArgumentParser add arguments.
"""
grp = parser.add_argument_group('Project Filter')
grp.add_argument(
'--project',
action='append',
default=[],
dest='projects',
help='projects to limit search, by exact name',
)
@classmethod
def from_parsed_args(cls, parsed_args):
return cls(projects=parsed_args.projects)
def __init__(self, projects):
self.projects = projects
def update_query(self, query):
if self.projects:
query = query.filter(
Project.name.in_(self.projects)
)
return query
<commit_msg>Support project filter with wildcard
Translate glob-style wildcards to ILIKE comparisons for the database.<commit_after>
|
import argparse
import logging
from aeromancer.db.models import Project
LOG = logging.getLogger(__name__)
class ProjectFilter(object):
"""Manage the arguments for filtering queries by project.
"""
@staticmethod
def add_arguments(parser):
"""Given an argparse.ArgumentParser add arguments.
"""
grp = parser.add_argument_group('Project Filter')
grp.add_argument(
'--project',
action='append',
default=[],
dest='projects',
help=('projects to limit search, '
'by exact name or glob-style patterns'),
)
@classmethod
def from_parsed_args(cls, parsed_args):
return cls(projects=parsed_args.projects)
def __init__(self, projects):
self.exact = []
self.patterns = []
for p in projects:
if '*' in p:
self.patterns.append(p.replace('*', '%'))
else:
self.exact.append(p)
self.projects = projects
def update_query(self, query):
the_filter = ()
if self.exact:
LOG.debug('filtering on projects in %s', self.exact)
the_filter += (Project.name.in_(self.exact),)
if self.patterns:
LOG.debug('filtering on projects matching %s', self.patterns)
the_filter += tuple(Project.name.ilike(p)
for p in self.patterns)
if the_filter:
query = query.filter(*the_filter)
return query
|
import argparse
from aeromancer.db.models import Project
class ProjectFilter(object):
"""Manage the arguments for filtering queries by project.
"""
@staticmethod
def add_arguments(parser):
"""Given an argparse.ArgumentParser add arguments.
"""
grp = parser.add_argument_group('Project Filter')
grp.add_argument(
'--project',
action='append',
default=[],
dest='projects',
help='projects to limit search, by exact name',
)
@classmethod
def from_parsed_args(cls, parsed_args):
return cls(projects=parsed_args.projects)
def __init__(self, projects):
self.projects = projects
def update_query(self, query):
if self.projects:
query = query.filter(
Project.name.in_(self.projects)
)
return query
Support project filter with wildcard
Translate glob-style wildcards to ILIKE comparisons for the database.import argparse
import logging
from aeromancer.db.models import Project
LOG = logging.getLogger(__name__)
class ProjectFilter(object):
"""Manage the arguments for filtering queries by project.
"""
@staticmethod
def add_arguments(parser):
"""Given an argparse.ArgumentParser add arguments.
"""
grp = parser.add_argument_group('Project Filter')
grp.add_argument(
'--project',
action='append',
default=[],
dest='projects',
help=('projects to limit search, '
'by exact name or glob-style patterns'),
)
@classmethod
def from_parsed_args(cls, parsed_args):
return cls(projects=parsed_args.projects)
def __init__(self, projects):
self.exact = []
self.patterns = []
for p in projects:
if '*' in p:
self.patterns.append(p.replace('*', '%'))
else:
self.exact.append(p)
self.projects = projects
def update_query(self, query):
the_filter = ()
if self.exact:
LOG.debug('filtering on projects in %s', self.exact)
the_filter += (Project.name.in_(self.exact),)
if self.patterns:
LOG.debug('filtering on projects matching %s', self.patterns)
the_filter += tuple(Project.name.ilike(p)
for p in self.patterns)
if the_filter:
query = query.filter(*the_filter)
return query
|
<commit_before>import argparse
from aeromancer.db.models import Project
class ProjectFilter(object):
"""Manage the arguments for filtering queries by project.
"""
@staticmethod
def add_arguments(parser):
"""Given an argparse.ArgumentParser add arguments.
"""
grp = parser.add_argument_group('Project Filter')
grp.add_argument(
'--project',
action='append',
default=[],
dest='projects',
help='projects to limit search, by exact name',
)
@classmethod
def from_parsed_args(cls, parsed_args):
return cls(projects=parsed_args.projects)
def __init__(self, projects):
self.projects = projects
def update_query(self, query):
if self.projects:
query = query.filter(
Project.name.in_(self.projects)
)
return query
<commit_msg>Support project filter with wildcard
Translate glob-style wildcards to ILIKE comparisons for the database.<commit_after>import argparse
import logging
from aeromancer.db.models import Project
LOG = logging.getLogger(__name__)
class ProjectFilter(object):
"""Manage the arguments for filtering queries by project.
"""
@staticmethod
def add_arguments(parser):
"""Given an argparse.ArgumentParser add arguments.
"""
grp = parser.add_argument_group('Project Filter')
grp.add_argument(
'--project',
action='append',
default=[],
dest='projects',
help=('projects to limit search, '
'by exact name or glob-style patterns'),
)
@classmethod
def from_parsed_args(cls, parsed_args):
return cls(projects=parsed_args.projects)
def __init__(self, projects):
self.exact = []
self.patterns = []
for p in projects:
if '*' in p:
self.patterns.append(p.replace('*', '%'))
else:
self.exact.append(p)
self.projects = projects
def update_query(self, query):
the_filter = ()
if self.exact:
LOG.debug('filtering on projects in %s', self.exact)
the_filter += (Project.name.in_(self.exact),)
if self.patterns:
LOG.debug('filtering on projects matching %s', self.patterns)
the_filter += tuple(Project.name.ilike(p)
for p in self.patterns)
if the_filter:
query = query.filter(*the_filter)
return query
|
f4f529aca5a37a19c3445ec7fc572ece08ba4293
|
examples/hosts-production.py
|
examples/hosts-production.py
|
#!/usr/bin/env python3
# (c) 2014 Brainly.com, Pawel Rozlach <pawel.rozlach@brainly.com>
# This script is intended to only find "where it is" and invoke the inventory
# tool with correct inventory path basing on it's name.
import os.path as op
import sys
# Configuration:
backend_domain = 'example.com'
ipaddress_keywords = ["tunnel_ip", ]
ipnetwork_keywords = []
inventorytool_path = '..'
inventory_path = '../test/fabric/'
# Where am I ?
cwd = op.dirname(op.realpath(__file__))
# Import inventory_tool
if inventorytool_path is not None:
sys.path.insert(0, op.abspath(op.join(cwd, inventorytool_path)))
import inventory_tool
# Locate the inventory file:
name = op.basename(sys.argv[0]).split(".")[0] + ".yml"
inventory_path = op.abspath(op.join(cwd, inventory_path, name))
if __name__ == '__main__':
inventory_tool.main(sys.argv,
inventory_path,
backend_domain=backend_domain,
extra_ipaddress_keywords=ipaddress_keywords,
extra_ipnetwork_keywords=ipnetwork_keywords,
)
|
#!/usr/bin/env python3
# Copyright (c) 2014 Pawel Rozlach, Brainly.com sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os.path as op
import sys
# Configuration:
backend_domain = 'example.com'
ipaddress_keywords = ["tunnel_ip", ]
ipnetwork_keywords = []
inventorytool_path = '..'
inventory_path = '../test/fabric/'
# Where am I ?
cwd = op.dirname(op.realpath(__file__))
# Import inventory_tool
if inventorytool_path is not None:
sys.path.insert(0, op.abspath(op.join(cwd, inventorytool_path)))
import inventory_tool
# Locate the inventory file:
name = op.basename(sys.argv[0]).split(".")[0] + ".yml"
inventory_path = op.abspath(op.join(cwd, inventory_path, name))
if __name__ == '__main__':
inventory_tool.main(sys.argv,
inventory_path,
backend_domain=backend_domain,
extra_ipaddress_keywords=ipaddress_keywords,
extra_ipnetwork_keywords=ipnetwork_keywords,
)
|
Add missing preamble for wrapper script
|
Add missing preamble for wrapper script
Change-Id: I26d002ca739544bc3d431a6cdb6b441bd33deb5a
|
Python
|
apache-2.0
|
brainly/inventory_tool,vespian/inventory_tool
|
#!/usr/bin/env python3
# (c) 2014 Brainly.com, Pawel Rozlach <pawel.rozlach@brainly.com>
# This script is intended to only find "where it is" and invoke the inventory
# tool with correct inventory path basing on it's name.
import os.path as op
import sys
# Configuration:
backend_domain = 'example.com'
ipaddress_keywords = ["tunnel_ip", ]
ipnetwork_keywords = []
inventorytool_path = '..'
inventory_path = '../test/fabric/'
# Where am I ?
cwd = op.dirname(op.realpath(__file__))
# Import inventory_tool
if inventorytool_path is not None:
sys.path.insert(0, op.abspath(op.join(cwd, inventorytool_path)))
import inventory_tool
# Locate the inventory file:
name = op.basename(sys.argv[0]).split(".")[0] + ".yml"
inventory_path = op.abspath(op.join(cwd, inventory_path, name))
if __name__ == '__main__':
inventory_tool.main(sys.argv,
inventory_path,
backend_domain=backend_domain,
extra_ipaddress_keywords=ipaddress_keywords,
extra_ipnetwork_keywords=ipnetwork_keywords,
)
Add missing preamble for wrapper script
Change-Id: I26d002ca739544bc3d431a6cdb6b441bd33deb5a
|
#!/usr/bin/env python3
# Copyright (c) 2014 Pawel Rozlach, Brainly.com sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os.path as op
import sys
# Configuration:
backend_domain = 'example.com'
ipaddress_keywords = ["tunnel_ip", ]
ipnetwork_keywords = []
inventorytool_path = '..'
inventory_path = '../test/fabric/'
# Where am I ?
cwd = op.dirname(op.realpath(__file__))
# Import inventory_tool
if inventorytool_path is not None:
sys.path.insert(0, op.abspath(op.join(cwd, inventorytool_path)))
import inventory_tool
# Locate the inventory file:
name = op.basename(sys.argv[0]).split(".")[0] + ".yml"
inventory_path = op.abspath(op.join(cwd, inventory_path, name))
if __name__ == '__main__':
inventory_tool.main(sys.argv,
inventory_path,
backend_domain=backend_domain,
extra_ipaddress_keywords=ipaddress_keywords,
extra_ipnetwork_keywords=ipnetwork_keywords,
)
|
<commit_before>#!/usr/bin/env python3
# (c) 2014 Brainly.com, Pawel Rozlach <pawel.rozlach@brainly.com>
# This script is intended to only find "where it is" and invoke the inventory
# tool with correct inventory path basing on it's name.
import os.path as op
import sys
# Configuration:
backend_domain = 'example.com'
ipaddress_keywords = ["tunnel_ip", ]
ipnetwork_keywords = []
inventorytool_path = '..'
inventory_path = '../test/fabric/'
# Where am I ?
cwd = op.dirname(op.realpath(__file__))
# Import inventory_tool
if inventorytool_path is not None:
sys.path.insert(0, op.abspath(op.join(cwd, inventorytool_path)))
import inventory_tool
# Locate the inventory file:
name = op.basename(sys.argv[0]).split(".")[0] + ".yml"
inventory_path = op.abspath(op.join(cwd, inventory_path, name))
if __name__ == '__main__':
inventory_tool.main(sys.argv,
inventory_path,
backend_domain=backend_domain,
extra_ipaddress_keywords=ipaddress_keywords,
extra_ipnetwork_keywords=ipnetwork_keywords,
)
<commit_msg>Add missing preamble for wrapper script
Change-Id: I26d002ca739544bc3d431a6cdb6b441bd33deb5a<commit_after>
|
#!/usr/bin/env python3
# Copyright (c) 2014 Pawel Rozlach, Brainly.com sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os.path as op
import sys
# Configuration:
backend_domain = 'example.com'
ipaddress_keywords = ["tunnel_ip", ]
ipnetwork_keywords = []
inventorytool_path = '..'
inventory_path = '../test/fabric/'
# Where am I ?
cwd = op.dirname(op.realpath(__file__))
# Import inventory_tool
if inventorytool_path is not None:
sys.path.insert(0, op.abspath(op.join(cwd, inventorytool_path)))
import inventory_tool
# Locate the inventory file:
name = op.basename(sys.argv[0]).split(".")[0] + ".yml"
inventory_path = op.abspath(op.join(cwd, inventory_path, name))
if __name__ == '__main__':
inventory_tool.main(sys.argv,
inventory_path,
backend_domain=backend_domain,
extra_ipaddress_keywords=ipaddress_keywords,
extra_ipnetwork_keywords=ipnetwork_keywords,
)
|
#!/usr/bin/env python3
# (c) 2014 Brainly.com, Pawel Rozlach <pawel.rozlach@brainly.com>
# This script is intended to only find "where it is" and invoke the inventory
# tool with correct inventory path basing on it's name.
import os.path as op
import sys
# Configuration:
backend_domain = 'example.com'
ipaddress_keywords = ["tunnel_ip", ]
ipnetwork_keywords = []
inventorytool_path = '..'
inventory_path = '../test/fabric/'
# Where am I ?
cwd = op.dirname(op.realpath(__file__))
# Import inventory_tool
if inventorytool_path is not None:
sys.path.insert(0, op.abspath(op.join(cwd, inventorytool_path)))
import inventory_tool
# Locate the inventory file:
name = op.basename(sys.argv[0]).split(".")[0] + ".yml"
inventory_path = op.abspath(op.join(cwd, inventory_path, name))
if __name__ == '__main__':
inventory_tool.main(sys.argv,
inventory_path,
backend_domain=backend_domain,
extra_ipaddress_keywords=ipaddress_keywords,
extra_ipnetwork_keywords=ipnetwork_keywords,
)
Add missing preamble for wrapper script
Change-Id: I26d002ca739544bc3d431a6cdb6b441bd33deb5a#!/usr/bin/env python3
# Copyright (c) 2014 Pawel Rozlach, Brainly.com sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os.path as op
import sys
# Configuration:
backend_domain = 'example.com'
ipaddress_keywords = ["tunnel_ip", ]
ipnetwork_keywords = []
inventorytool_path = '..'
inventory_path = '../test/fabric/'
# Where am I ?
cwd = op.dirname(op.realpath(__file__))
# Import inventory_tool
if inventorytool_path is not None:
sys.path.insert(0, op.abspath(op.join(cwd, inventorytool_path)))
import inventory_tool
# Locate the inventory file:
name = op.basename(sys.argv[0]).split(".")[0] + ".yml"
inventory_path = op.abspath(op.join(cwd, inventory_path, name))
if __name__ == '__main__':
inventory_tool.main(sys.argv,
inventory_path,
backend_domain=backend_domain,
extra_ipaddress_keywords=ipaddress_keywords,
extra_ipnetwork_keywords=ipnetwork_keywords,
)
|
<commit_before>#!/usr/bin/env python3
# (c) 2014 Brainly.com, Pawel Rozlach <pawel.rozlach@brainly.com>
# This script is intended to only find "where it is" and invoke the inventory
# tool with correct inventory path basing on it's name.
import os.path as op
import sys
# Configuration:
backend_domain = 'example.com'
ipaddress_keywords = ["tunnel_ip", ]
ipnetwork_keywords = []
inventorytool_path = '..'
inventory_path = '../test/fabric/'
# Where am I ?
cwd = op.dirname(op.realpath(__file__))
# Import inventory_tool
if inventorytool_path is not None:
sys.path.insert(0, op.abspath(op.join(cwd, inventorytool_path)))
import inventory_tool
# Locate the inventory file:
name = op.basename(sys.argv[0]).split(".")[0] + ".yml"
inventory_path = op.abspath(op.join(cwd, inventory_path, name))
if __name__ == '__main__':
inventory_tool.main(sys.argv,
inventory_path,
backend_domain=backend_domain,
extra_ipaddress_keywords=ipaddress_keywords,
extra_ipnetwork_keywords=ipnetwork_keywords,
)
<commit_msg>Add missing preamble for wrapper script
Change-Id: I26d002ca739544bc3d431a6cdb6b441bd33deb5a<commit_after>#!/usr/bin/env python3
# Copyright (c) 2014 Pawel Rozlach, Brainly.com sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import os.path as op
import sys
# Configuration:
backend_domain = 'example.com'
ipaddress_keywords = ["tunnel_ip", ]
ipnetwork_keywords = []
inventorytool_path = '..'
inventory_path = '../test/fabric/'
# Where am I ?
cwd = op.dirname(op.realpath(__file__))
# Import inventory_tool
if inventorytool_path is not None:
sys.path.insert(0, op.abspath(op.join(cwd, inventorytool_path)))
import inventory_tool
# Locate the inventory file:
name = op.basename(sys.argv[0]).split(".")[0] + ".yml"
inventory_path = op.abspath(op.join(cwd, inventory_path, name))
if __name__ == '__main__':
inventory_tool.main(sys.argv,
inventory_path,
backend_domain=backend_domain,
extra_ipaddress_keywords=ipaddress_keywords,
extra_ipnetwork_keywords=ipnetwork_keywords,
)
|
b856207fe42d480975618f5749ef9febc84f0363
|
geolocation_helper/models.py
|
geolocation_helper/models.py
|
from django.contrib.gis.db import models as geomodels
from django.contrib.gis.geos.point import Point
from geopy import geocoders
class GeoLocatedModel(geomodels.Model):
geom = geomodels.PointField(null=True, blank=True)
objects = geomodels.GeoManager()
def get_location_as_string(self):
"""
Should return a string for the address as Google Maps format
"""
raise NotImplementedError
class Meta:
abstract = True
def update_geolocation(sender, instance, **kwargs):
"""
This signal receiver update the instance but does not save it
Should be used with pre_save signal
"""
g = geocoders.GoogleV3()
try:
place, (lat, lng) = g.geocode(instance.get_location_as_string())
instance.geom = Point(lng, lat)
except:
instance.geom = None
|
from django.contrib.gis.db import models as geomodels
from django.contrib.gis.geos.point import Point
from geopy import geocoders
class GeoLocatedModel(geomodels.Model):
geom = geomodels.PointField(null=True, blank=True)
objects = geomodels.GeoManager()
def get_location_as_string(self):
"""
Should return a string for the address as Google Maps format
"""
raise NotImplementedError
def is_geolocated(self):
"""
Usefull for example in the admin in order to easily identify non geolocated object
"""
return self.geom is not None
is_geolocated.boolean = True
class Meta:
abstract = True
def update_geolocation(sender, instance, **kwargs):
"""
This signal receiver update the instance but does not save it
Should be used with pre_save signal
"""
g = geocoders.GoogleV3()
try:
place, (lat, lng) = g.geocode(instance.get_location_as_string())
instance.geom = Point(lng, lat)
except:
instance.geom = None
|
Add is_geolocated property for admin purpuse
|
Add is_geolocated property for admin purpuse
|
Python
|
bsd-2-clause
|
atiberghien/django-geolocation-helper,atiberghien/django-geolocation-helper
|
from django.contrib.gis.db import models as geomodels
from django.contrib.gis.geos.point import Point
from geopy import geocoders
class GeoLocatedModel(geomodels.Model):
geom = geomodels.PointField(null=True, blank=True)
objects = geomodels.GeoManager()
def get_location_as_string(self):
"""
Should return a string for the address as Google Maps format
"""
raise NotImplementedError
class Meta:
abstract = True
def update_geolocation(sender, instance, **kwargs):
"""
This signal receiver update the instance but does not save it
Should be used with pre_save signal
"""
g = geocoders.GoogleV3()
try:
place, (lat, lng) = g.geocode(instance.get_location_as_string())
instance.geom = Point(lng, lat)
except:
instance.geom = NoneAdd is_geolocated property for admin purpuse
|
from django.contrib.gis.db import models as geomodels
from django.contrib.gis.geos.point import Point
from geopy import geocoders
class GeoLocatedModel(geomodels.Model):
geom = geomodels.PointField(null=True, blank=True)
objects = geomodels.GeoManager()
def get_location_as_string(self):
"""
Should return a string for the address as Google Maps format
"""
raise NotImplementedError
def is_geolocated(self):
"""
Usefull for example in the admin in order to easily identify non geolocated object
"""
return self.geom is not None
is_geolocated.boolean = True
class Meta:
abstract = True
def update_geolocation(sender, instance, **kwargs):
"""
This signal receiver update the instance but does not save it
Should be used with pre_save signal
"""
g = geocoders.GoogleV3()
try:
place, (lat, lng) = g.geocode(instance.get_location_as_string())
instance.geom = Point(lng, lat)
except:
instance.geom = None
|
<commit_before>from django.contrib.gis.db import models as geomodels
from django.contrib.gis.geos.point import Point
from geopy import geocoders
class GeoLocatedModel(geomodels.Model):
geom = geomodels.PointField(null=True, blank=True)
objects = geomodels.GeoManager()
def get_location_as_string(self):
"""
Should return a string for the address as Google Maps format
"""
raise NotImplementedError
class Meta:
abstract = True
def update_geolocation(sender, instance, **kwargs):
"""
This signal receiver update the instance but does not save it
Should be used with pre_save signal
"""
g = geocoders.GoogleV3()
try:
place, (lat, lng) = g.geocode(instance.get_location_as_string())
instance.geom = Point(lng, lat)
except:
instance.geom = None<commit_msg>Add is_geolocated property for admin purpuse<commit_after>
|
from django.contrib.gis.db import models as geomodels
from django.contrib.gis.geos.point import Point
from geopy import geocoders
class GeoLocatedModel(geomodels.Model):
geom = geomodels.PointField(null=True, blank=True)
objects = geomodels.GeoManager()
def get_location_as_string(self):
"""
Should return a string for the address as Google Maps format
"""
raise NotImplementedError
def is_geolocated(self):
"""
Usefull for example in the admin in order to easily identify non geolocated object
"""
return self.geom is not None
is_geolocated.boolean = True
class Meta:
abstract = True
def update_geolocation(sender, instance, **kwargs):
"""
This signal receiver update the instance but does not save it
Should be used with pre_save signal
"""
g = geocoders.GoogleV3()
try:
place, (lat, lng) = g.geocode(instance.get_location_as_string())
instance.geom = Point(lng, lat)
except:
instance.geom = None
|
from django.contrib.gis.db import models as geomodels
from django.contrib.gis.geos.point import Point
from geopy import geocoders
class GeoLocatedModel(geomodels.Model):
geom = geomodels.PointField(null=True, blank=True)
objects = geomodels.GeoManager()
def get_location_as_string(self):
"""
Should return a string for the address as Google Maps format
"""
raise NotImplementedError
class Meta:
abstract = True
def update_geolocation(sender, instance, **kwargs):
"""
This signal receiver update the instance but does not save it
Should be used with pre_save signal
"""
g = geocoders.GoogleV3()
try:
place, (lat, lng) = g.geocode(instance.get_location_as_string())
instance.geom = Point(lng, lat)
except:
instance.geom = NoneAdd is_geolocated property for admin purpusefrom django.contrib.gis.db import models as geomodels
from django.contrib.gis.geos.point import Point
from geopy import geocoders
class GeoLocatedModel(geomodels.Model):
geom = geomodels.PointField(null=True, blank=True)
objects = geomodels.GeoManager()
def get_location_as_string(self):
"""
Should return a string for the address as Google Maps format
"""
raise NotImplementedError
def is_geolocated(self):
"""
Usefull for example in the admin in order to easily identify non geolocated object
"""
return self.geom is not None
is_geolocated.boolean = True
class Meta:
abstract = True
def update_geolocation(sender, instance, **kwargs):
"""
This signal receiver update the instance but does not save it
Should be used with pre_save signal
"""
g = geocoders.GoogleV3()
try:
place, (lat, lng) = g.geocode(instance.get_location_as_string())
instance.geom = Point(lng, lat)
except:
instance.geom = None
|
<commit_before>from django.contrib.gis.db import models as geomodels
from django.contrib.gis.geos.point import Point
from geopy import geocoders
class GeoLocatedModel(geomodels.Model):
geom = geomodels.PointField(null=True, blank=True)
objects = geomodels.GeoManager()
def get_location_as_string(self):
"""
Should return a string for the address as Google Maps format
"""
raise NotImplementedError
class Meta:
abstract = True
def update_geolocation(sender, instance, **kwargs):
"""
This signal receiver update the instance but does not save it
Should be used with pre_save signal
"""
g = geocoders.GoogleV3()
try:
place, (lat, lng) = g.geocode(instance.get_location_as_string())
instance.geom = Point(lng, lat)
except:
instance.geom = None<commit_msg>Add is_geolocated property for admin purpuse<commit_after>from django.contrib.gis.db import models as geomodels
from django.contrib.gis.geos.point import Point
from geopy import geocoders
class GeoLocatedModel(geomodels.Model):
geom = geomodels.PointField(null=True, blank=True)
objects = geomodels.GeoManager()
def get_location_as_string(self):
"""
Should return a string for the address as Google Maps format
"""
raise NotImplementedError
def is_geolocated(self):
"""
Useful for example in the admin in order to easily identify non geolocated objects
"""
return self.geom is not None
is_geolocated.boolean = True
class Meta:
abstract = True
def update_geolocation(sender, instance, **kwargs):
"""
This signal receiver update the instance but does not save it
Should be used with pre_save signal
"""
g = geocoders.GoogleV3()
try:
place, (lat, lng) = g.geocode(instance.get_location_as_string())
instance.geom = Point(lng, lat)
except:
instance.geom = None
|
7bace98978e0058489b4872d7af300d91fe7f55d
|
createAdminOnce.py
|
createAdminOnce.py
|
#!/usr/bin/env python
import django
django.setup()
import sys,os
from django.contrib.auth.models import User
from django.core.management import call_command
admin_username = os.getenv('WEBLATE_ADMIN_NAME', 'admin')
try:
user = User.objects.get(username=admin_username)
except:
print 'Creating Admin...'
User.objects.create_superuser(admin_username, os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com'), os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
# call_command('createadmin', password=os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
sys.exit(0)
else:
print 'Admin seems to exist. Not creating admin...'
sys.exit(0)
|
#!/usr/bin/env python
import django
django.setup()
import sys,os
from django.contrib.auth.models import User
from django.core.management import call_command
admin_username = os.getenv('WEBLATE_ADMIN_NAME', 'admin')
try:
user = User.objects.get(username=admin_username)
except:
print 'Creating Admin...'
User.objects.create_superuser(admin_username, os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com'), os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
print 'Admin user has been created! Login with ' + os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com')
sys.exit(0)
else:
print 'Admin seems to exist. Not creating admin...'
sys.exit(0)
|
Clarify admin user creation message
|
Clarify admin user creation message
|
Python
|
mit
|
beevelop/docker-weblate,beevelop/docker-weblate
|
#!/usr/bin/env python
import django
django.setup()
import sys,os
from django.contrib.auth.models import User
from django.core.management import call_command
admin_username = os.getenv('WEBLATE_ADMIN_NAME', 'admin')
try:
user = User.objects.get(username=admin_username)
except:
print 'Creating Admin...'
User.objects.create_superuser(admin_username, os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com'), os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
# call_command('createadmin', password=os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
sys.exit(0)
else:
print 'Admin seems to exist. Not creating admin...'
sys.exit(0)
Clarify admin user creation message
|
#!/usr/bin/env python
import django
django.setup()
import sys,os
from django.contrib.auth.models import User
from django.core.management import call_command
admin_username = os.getenv('WEBLATE_ADMIN_NAME', 'admin')
try:
user = User.objects.get(username=admin_username)
except:
print 'Creating Admin...'
User.objects.create_superuser(admin_username, os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com'), os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
print 'Admin user has been created! Login with ' + os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com')
sys.exit(0)
else:
print 'Admin seems to exist. Not creating admin...'
sys.exit(0)
|
<commit_before>#!/usr/bin/env python
import django
django.setup()
import sys,os
from django.contrib.auth.models import User
from django.core.management import call_command
admin_username = os.getenv('WEBLATE_ADMIN_NAME', 'admin')
try:
user = User.objects.get(username=admin_username)
except:
print 'Creating Admin...'
User.objects.create_superuser(admin_username, os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com'), os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
# call_command('createadmin', password=os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
sys.exit(0)
else:
print 'Admin seems to exist. Not creating admin...'
sys.exit(0)
<commit_msg>Clarify admin user creation message<commit_after>
|
#!/usr/bin/env python
import django
django.setup()
import sys,os
from django.contrib.auth.models import User
from django.core.management import call_command
admin_username = os.getenv('WEBLATE_ADMIN_NAME', 'admin')
try:
user = User.objects.get(username=admin_username)
except:
print 'Creating Admin...'
User.objects.create_superuser(admin_username, os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com'), os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
print 'Admin user has been created! Login with ' + os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com')
sys.exit(0)
else:
print 'Admin seems to exist. Not creating admin...'
sys.exit(0)
|
#!/usr/bin/env python
import django
django.setup()
import sys,os
from django.contrib.auth.models import User
from django.core.management import call_command
admin_username = os.getenv('WEBLATE_ADMIN_NAME', 'admin')
try:
user = User.objects.get(username=admin_username)
except:
print 'Creating Admin...'
User.objects.create_superuser(admin_username, os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com'), os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
# call_command('createadmin', password=os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
sys.exit(0)
else:
print 'Admin seems to exist. Not creating admin...'
sys.exit(0)
Clarify admin user creation message#!/usr/bin/env python
import django
django.setup()
import sys,os
from django.contrib.auth.models import User
from django.core.management import call_command
admin_username = os.getenv('WEBLATE_ADMIN_NAME', 'admin')
try:
user = User.objects.get(username=admin_username)
except:
print 'Creating Admin...'
User.objects.create_superuser(admin_username, os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com'), os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
print 'Admin user has been created! Login with ' + os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com')
sys.exit(0)
else:
print 'Admin seems to exist. Not creating admin...'
sys.exit(0)
|
<commit_before>#!/usr/bin/env python
import django
django.setup()
import sys,os
from django.contrib.auth.models import User
from django.core.management import call_command
admin_username = os.getenv('WEBLATE_ADMIN_NAME', 'admin')
try:
user = User.objects.get(username=admin_username)
except:
print 'Creating Admin...'
User.objects.create_superuser(admin_username, os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com'), os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
# call_command('createadmin', password=os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
sys.exit(0)
else:
print 'Admin seems to exist. Not creating admin...'
sys.exit(0)
<commit_msg>Clarify admin user creation message<commit_after>#!/usr/bin/env python
import django
django.setup()
import sys,os
from django.contrib.auth.models import User
from django.core.management import call_command
admin_username = os.getenv('WEBLATE_ADMIN_NAME', 'admin')
try:
user = User.objects.get(username=admin_username)
except:
print 'Creating Admin...'
User.objects.create_superuser(admin_username, os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com'), os.getenv('ADMIN_PASSWORD', 'Un1c0rn'))
print 'Admin user has been created! Login with ' + os.getenv('WEBLATE_ADMIN_EMAIL', 'admin@example.com')
sys.exit(0)
else:
print 'Admin seems to exist. Not creating admin...'
sys.exit(0)
|
ed05dbf4dc231ea659b19310e6065d4781bd18bc
|
code/tests/test_smoothing.py
|
code/tests/test_smoothing.py
|
"""
Tests functions in smoothing.py
Run with:
nosetests test_smoothing.py
"""
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
"""
==================Test file for smoothing.py======================
Test convolution module, hrf function and convolve function
Run with:
nosetests nosetests code/tests/test_smoothing.py
"""
from __future__ import absolute_import, division, print_function
from nose.tools import assert_equal
from numpy.testing import assert_almost_equal, assert_array_equal
import numpy as np
import sys
sys.path.append("code/utils")
from smoothing import *
import make_class
subtest_runtest1 = make_class.run("test", "001", filtered_data=True)
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
Add separate test function for smoothing.py
|
Add separate test function for smoothing.py
|
Python
|
bsd-3-clause
|
berkeley-stat159/project-delta
|
"""
Tests functions in smoothing.py
Run with:
nosetests test_smoothing.py
"""
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)Add separate test function for smoothing.py
|
"""
==================Test file for smoothing.py======================
Test convolution module, hrf function and convolve function
Run with:
nosetests nosetests code/tests/test_smoothing.py
"""
from __future__ import absolute_import, division, print_function
from nose.tools import assert_equal
from numpy.testing import assert_almost_equal, assert_array_equal
import numpy as np
import sys
sys.path.append("code/utils")
from smoothing import *
import make_class
subtest_runtest1 = make_class.run("test", "001", filtered_data=True)
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
<commit_before>"""
Tests functions in smoothing.py
Run with:
nosetests test_smoothing.py
"""
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)<commit_msg>Add seperate test function for smoothing.py<commit_after>
|
"""
==================Test file for smoothing.py======================
Test convolution module, hrf function and convolve function
Run with:
nosetests nosetests code/tests/test_smoothing.py
"""
from __future__ import absolute_import, division, print_function
from nose.tools import assert_equal
from numpy.testing import assert_almost_equal, assert_array_equal
import numpy as np
import sys
sys.path.append("code/utils")
from smoothing import *
import make_class
subtest_runtest1 = make_class.run("test", "001", filtered_data=True)
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
"""
Tests functions in smoothing.py
Run with:
nosetests test_smoothing.py
"""
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)Add separate test function for smoothing.py"""
==================Test file for smoothing.py======================
Test convolution module, hrf function and convolve function
Run with:
nosetests nosetests code/tests/test_smoothing.py
"""
from __future__ import absolute_import, division, print_function
from nose.tools import assert_equal
from numpy.testing import assert_almost_equal, assert_array_equal
import numpy as np
import sys
sys.path.append("code/utils")
from smoothing import *
import make_class
subtest_runtest1 = make_class.run("test", "001", filtered_data=True)
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
<commit_before>"""
Tests functions in smoothing.py
Run with:
nosetests test_smoothing.py
"""
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)<commit_msg>Add seperate test function for smoothing.py<commit_after>"""
==================Test file for smoothing.py======================
Test convolution module, hrf function and convolve function
Run with:
nosetests nosetests code/tests/test_smoothing.py
"""
from __future__ import absolute_import, division, print_function
from nose.tools import assert_equal
from numpy.testing import assert_almost_equal, assert_array_equal
import numpy as np
import sys
sys.path.append("code/utils")
from smoothing import *
import make_class
subtest_runtest1 = make_class.run("test", "001", filtered_data=True)
# Test method .smooth()
smooth1, smooth2 = subtest_runtest1.smooth(0), subtest_runtest1.smooth(1, 5)
smooth3 = subtest_runtest1.smooth(2, 0.25)
assert [smooth1.max(), smooth1.shape, smooth1.sum()] == [0, (3, 3, 3), 0]
assert [smooth2.max(), smooth2.shape, smooth2.sum()] == [1, (3, 3, 3), 27]
assert [smooth3.max(), smooth3.shape, smooth3.sum()] == [8, (3, 3, 3), 108]
assert [smooth1.std(), smooth2.std()] == [0, 0]
assert_almost_equal(smooth3.std(), 1.6329931618554521)
|
e9171e8d77b457e2c96fca37c89d68c518bec5f7
|
src/urllib3/util/util.py
|
src/urllib3/util/util.py
|
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
|
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
    x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
    """Return *x* as ``bytes``, encoding ``str`` input (UTF-8/strict unless overridden)."""
    if isinstance(x, bytes):
        return x
    if not isinstance(x, str):
        raise TypeError(f"not expecting type {type(x).__name__}")
    if not encoding and not errors:
        # No overrides: plain encode() is UTF-8/strict.
        return x.encode()
    return x.encode(encoding or "utf-8", errors=errors or "strict")
def to_str(
    x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
    """Return *x* as ``str``, decoding ``bytes`` input (UTF-8/strict unless overridden)."""
    if isinstance(x, str):
        return x
    if not isinstance(x, bytes):
        raise TypeError(f"not expecting type {type(x).__name__}")
    if not encoding and not errors:
        # No overrides: plain decode() is UTF-8/strict.
        return x.decode()
    return x.decode(encoding or "utf-8", errors=errors or "strict")
def reraise(
    tp: Optional[Type[BaseException]],
    value: Optional[BaseException],
    tb: Optional[TracebackType] = None,
) -> NoReturn:
    """Re-raise *value*, attaching traceback *tb* when it differs from the current one.

    The locals are dropped in ``finally`` to break the reference cycle that a
    traceback object creates with the frame holding it.
    """
    try:
        if tb is not value.__traceback__:
            raise value.with_traceback(tb)
        raise value
    finally:
        del value, tb
|
Bring coverage back to 100%
|
Bring coverage back to 100%
All calls to reraise() are in branches where value is truthy, so we
can't reach that code.
|
Python
|
mit
|
sigmavirus24/urllib3,sigmavirus24/urllib3,urllib3/urllib3,urllib3/urllib3
|
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
Bring coverage back to 100%
All calls to reraise() are in branches where value is truthy, so we
can't reach that code.
|
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
|
<commit_before>from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
<commit_msg>Bring coverage back to 100%
All calls to reraise() are in branches where value is truthy, so we
can't reach that code.<commit_after>
|
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
|
from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
Bring coverage back to 100%
All calls to reraise() are in branches where value is truthy, so we
can't reach that code.from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
|
<commit_before>from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value is None:
value = tp()
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
<commit_msg>Bring coverage back to 100%
All calls to reraise() are in branches where value is truthy, so we
can't reach that code.<commit_after>from types import TracebackType
from typing import NoReturn, Optional, Type, Union
def to_bytes(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> bytes:
if isinstance(x, bytes):
return x
elif not isinstance(x, str):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.encode(encoding or "utf-8", errors=errors or "strict")
return x.encode()
def to_str(
x: Union[str, bytes], encoding: Optional[str] = None, errors: Optional[str] = None
) -> str:
if isinstance(x, str):
return x
elif not isinstance(x, bytes):
raise TypeError(f"not expecting type {type(x).__name__}")
elif encoding or errors:
return x.decode(encoding or "utf-8", errors=errors or "strict")
return x.decode()
def reraise(
tp: Optional[Type[BaseException]],
value: Optional[BaseException],
tb: Optional[TracebackType] = None,
) -> NoReturn:
try:
if value.__traceback__ is not tb:
raise value.with_traceback(tb)
raise value
finally:
value = None
tb = None
|
80e67ffa99cc911219b316b172d7c74e1ede5c50
|
camera_selection/scripts/camera_selection.py
|
camera_selection/scripts/camera_selection.py
|
#!/usr/bin/env python
import rospy
import Adafruit_BBIO.GPIO as GPIO
from vortex_msgs.msg import CameraFeedSelection
GPIO_PIN_MAP = rospy.get_param('/camera/pin_map')
class CameraSelection(object):
def __init__(self):
rospy.init_node('camera_selection')
self.cam_select_sub = rospy.Subscriber('camera_feed_selection',CameraFeedSelection, self.callback)
#Set pin as output
for selection_pin in GPIO_PIN_MAP:
GPIO.setup(GPIO_PIN_MAP[selection_pin], GPIO.OUT)
#Set to ground
for selection_pin in GPIO_PIN_MAP:
GPIO.output(GPIO_PIN_MAP[selection_pin], GPIO.LOW)
def callback(self, msg):
pass
if __name__ == '__main__':
try:
camera_selection = CameraSelection()
rospy.spin()
except rospy.ROSInterruptException:
pass
|
#!/usr/bin/env python
"""ROS node that drives camera multiplexer GPIO pins from CameraFeedSelection messages."""
import rospy
import Adafruit_BBIO.GPIO as GPIO
from vortex_msgs.msg import CameraFeedSelection

# Per-feed GPIO pin names, loaded from the ROS parameter server.
PIN_MAP_FEED0 = rospy.get_param('/camera/pin_map_feed0')
PIN_MAP_FEED1 = rospy.get_param('/camera/pin_map_feed1')
PIN_MAP_FEED2 = rospy.get_param('/camera/pin_map_feed2')
PIN_MAP_LIST = [PIN_MAP_FEED0,PIN_MAP_FEED1,PIN_MAP_FEED2]


class CameraSelection(object):
    """Subscribes to camera_feed_selection and mirrors the requested pin levels."""

    def __init__(self):
        rospy.init_node('camera_selection')
        self.cam_select_sub = rospy.Subscriber('camera_feed_selection',CameraFeedSelection, self.callback)
        #Set pin as output
        for pin_list in PIN_MAP_LIST:
            for pin in pin_list:
                GPIO.setup(pin, GPIO.OUT)

    def callback(self, msg):
        # assumes msg.feed is a valid index 0-2 and len(msg.pin_values) matches
        # the feed's pin map -- TODO confirm against the publisher.
        feed_pin_map = PIN_MAP_LIST[msg.feed]
        for i, level in enumerate(msg.pin_values):
            if level:
                GPIO.output(feed_pin_map[i], GPIO.HIGH)
            else:  # fixed: original 'else' was missing its colon (SyntaxError)
                GPIO.output(feed_pin_map[i], GPIO.LOW)


if __name__ == '__main__':
    try:
        camera_selection = CameraSelection()
        rospy.spin()
    except rospy.ROSInterruptException:
        pass
|
Add callback, set camera feed selection pins based on msg
|
Add callback, set camera feed selection pins based on msg
|
Python
|
mit
|
vortexntnu/rov-control,vortexntnu/rov-control,vortexntnu/rov-control
|
#!/usr/bin/env python
import rospy
import Adafruit_BBIO.GPIO as GPIO
from vortex_msgs.msg import CameraFeedSelection
GPIO_PIN_MAP = rospy.get_param('/camera/pin_map')
class CameraSelection(object):
def __init__(self):
rospy.init_node('camera_selection')
self.cam_select_sub = rospy.Subscriber('camera_feed_selection',CameraFeedSelection, self.callback)
#Set pin as output
for selection_pin in GPIO_PIN_MAP:
GPIO.setup(GPIO_PIN_MAP[selection_pin], GPIO.OUT)
#Set to ground
for selection_pin in GPIO_PIN_MAP:
GPIO.output(GPIO_PIN_MAP[selection_pin], GPIO.LOW)
def callback(self, msg):
pass
if __name__ == '__main__':
try:
camera_selection = CameraSelection()
rospy.spin()
except rospy.ROSInterruptException:
pass
Add callback, set camera feed selection pins based on msg
|
#!/usr/bin/env python
import rospy
import Adafruit_BBIO.GPIO as GPIO
from vortex_msgs.msg import CameraFeedSelection
PIN_MAP_FEED0 = rospy.get_param('/camera/pin_map_feed0')
PIN_MAP_FEED1 = rospy.get_param('/camera/pin_map_feed1')
PIN_MAP_FEED2 = rospy.get_param('/camera/pin_map_feed2')
PIN_MAP_LIST = [PIN_MAP_FEED0,PIN_MAP_FEED1,PIN_MAP_FEED2]
class CameraSelection(object):
def __init__(self):
rospy.init_node('camera_selection')
self.cam_select_sub = rospy.Subscriber('camera_feed_selection',CameraFeedSelection, self.callback)
#Set pin as output
for pin_list in PIN_MAP_LIST:
for pin in pin_list:
GPIO.setup(pin, GPIO.OUT)
def callback(self, msg):
feed_pin_map = PIN_MAP_LIST[msg.feed]
for i, level in enumerate(msg.pin_values):
if level:
GPIO.output(feed_pin_map[i], GPIO.HIGH)
else
GPIO.output(feed_pin_map[i], GPIO.LOW)
if __name__ == '__main__':
try:
camera_selection = CameraSelection()
rospy.spin()
except rospy.ROSInterruptException:
pass
|
<commit_before>#!/usr/bin/env python
import rospy
import Adafruit_BBIO.GPIO as GPIO
from vortex_msgs.msg import CameraFeedSelection
GPIO_PIN_MAP = rospy.get_param('/camera/pin_map')
class CameraSelection(object):
def __init__(self):
rospy.init_node('camera_selection')
self.cam_select_sub = rospy.Subscriber('camera_feed_selection',CameraFeedSelection, self.callback)
#Set pin as output
for selection_pin in GPIO_PIN_MAP:
GPIO.setup(GPIO_PIN_MAP[selection_pin], GPIO.OUT)
#Set to ground
for selection_pin in GPIO_PIN_MAP:
GPIO.output(GPIO_PIN_MAP[selection_pin], GPIO.LOW)
def callback(self, msg):
pass
if __name__ == '__main__':
try:
camera_selection = CameraSelection()
rospy.spin()
except rospy.ROSInterruptException:
pass
<commit_msg>Add callback, set camera feed selection pins based on msg<commit_after>
|
#!/usr/bin/env python
import rospy
import Adafruit_BBIO.GPIO as GPIO
from vortex_msgs.msg import CameraFeedSelection
PIN_MAP_FEED0 = rospy.get_param('/camera/pin_map_feed0')
PIN_MAP_FEED1 = rospy.get_param('/camera/pin_map_feed1')
PIN_MAP_FEED2 = rospy.get_param('/camera/pin_map_feed2')
PIN_MAP_LIST = [PIN_MAP_FEED0,PIN_MAP_FEED1,PIN_MAP_FEED2]
class CameraSelection(object):
def __init__(self):
rospy.init_node('camera_selection')
self.cam_select_sub = rospy.Subscriber('camera_feed_selection',CameraFeedSelection, self.callback)
#Set pin as output
for pin_list in PIN_MAP_LIST:
for pin in pin_list:
GPIO.setup(pin, GPIO.OUT)
def callback(self, msg):
feed_pin_map = PIN_MAP_LIST[msg.feed]
for i, level in enumerate(msg.pin_values):
if level:
GPIO.output(feed_pin_map[i], GPIO.HIGH)
else
GPIO.output(feed_pin_map[i], GPIO.LOW)
if __name__ == '__main__':
try:
camera_selection = CameraSelection()
rospy.spin()
except rospy.ROSInterruptException:
pass
|
#!/usr/bin/env python
import rospy
import Adafruit_BBIO.GPIO as GPIO
from vortex_msgs.msg import CameraFeedSelection
GPIO_PIN_MAP = rospy.get_param('/camera/pin_map')
class CameraSelection(object):
def __init__(self):
rospy.init_node('camera_selection')
self.cam_select_sub = rospy.Subscriber('camera_feed_selection',CameraFeedSelection, self.callback)
#Set pin as output
for selection_pin in GPIO_PIN_MAP:
GPIO.setup(GPIO_PIN_MAP[selection_pin], GPIO.OUT)
#Set to ground
for selection_pin in GPIO_PIN_MAP:
GPIO.output(GPIO_PIN_MAP[selection_pin], GPIO.LOW)
def callback(self, msg):
pass
if __name__ == '__main__':
try:
camera_selection = CameraSelection()
rospy.spin()
except rospy.ROSInterruptException:
pass
Add callback, set camera feed selection pins based on msg#!/usr/bin/env python
import rospy
import Adafruit_BBIO.GPIO as GPIO
from vortex_msgs.msg import CameraFeedSelection
PIN_MAP_FEED0 = rospy.get_param('/camera/pin_map_feed0')
PIN_MAP_FEED1 = rospy.get_param('/camera/pin_map_feed1')
PIN_MAP_FEED2 = rospy.get_param('/camera/pin_map_feed2')
PIN_MAP_LIST = [PIN_MAP_FEED0,PIN_MAP_FEED1,PIN_MAP_FEED2]
class CameraSelection(object):
def __init__(self):
rospy.init_node('camera_selection')
self.cam_select_sub = rospy.Subscriber('camera_feed_selection',CameraFeedSelection, self.callback)
#Set pin as output
for pin_list in PIN_MAP_LIST:
for pin in pin_list:
GPIO.setup(pin, GPIO.OUT)
def callback(self, msg):
feed_pin_map = PIN_MAP_LIST[msg.feed]
for i, level in enumerate(msg.pin_values):
if level:
GPIO.output(feed_pin_map[i], GPIO.HIGH)
else
GPIO.output(feed_pin_map[i], GPIO.LOW)
if __name__ == '__main__':
try:
camera_selection = CameraSelection()
rospy.spin()
except rospy.ROSInterruptException:
pass
|
<commit_before>#!/usr/bin/env python
import rospy
import Adafruit_BBIO.GPIO as GPIO
from vortex_msgs.msg import CameraFeedSelection
GPIO_PIN_MAP = rospy.get_param('/camera/pin_map')
class CameraSelection(object):
def __init__(self):
rospy.init_node('camera_selection')
self.cam_select_sub = rospy.Subscriber('camera_feed_selection',CameraFeedSelection, self.callback)
#Set pin as output
for selection_pin in GPIO_PIN_MAP:
GPIO.setup(GPIO_PIN_MAP[selection_pin], GPIO.OUT)
#Set to ground
for selection_pin in GPIO_PIN_MAP:
GPIO.output(GPIO_PIN_MAP[selection_pin], GPIO.LOW)
def callback(self, msg):
pass
if __name__ == '__main__':
try:
camera_selection = CameraSelection()
rospy.spin()
except rospy.ROSInterruptException:
pass
<commit_msg>Add callback, set camera feed selection pins based on msg<commit_after>#!/usr/bin/env python
import rospy
import Adafruit_BBIO.GPIO as GPIO
from vortex_msgs.msg import CameraFeedSelection
PIN_MAP_FEED0 = rospy.get_param('/camera/pin_map_feed0')
PIN_MAP_FEED1 = rospy.get_param('/camera/pin_map_feed1')
PIN_MAP_FEED2 = rospy.get_param('/camera/pin_map_feed2')
PIN_MAP_LIST = [PIN_MAP_FEED0,PIN_MAP_FEED1,PIN_MAP_FEED2]
class CameraSelection(object):
def __init__(self):
rospy.init_node('camera_selection')
self.cam_select_sub = rospy.Subscriber('camera_feed_selection',CameraFeedSelection, self.callback)
#Set pin as output
for pin_list in PIN_MAP_LIST:
for pin in pin_list:
GPIO.setup(pin, GPIO.OUT)
def callback(self, msg):
feed_pin_map = PIN_MAP_LIST[msg.feed]
for i, level in enumerate(msg.pin_values):
if level:
GPIO.output(feed_pin_map[i], GPIO.HIGH)
else
GPIO.output(feed_pin_map[i], GPIO.LOW)
if __name__ == '__main__':
try:
camera_selection = CameraSelection()
rospy.spin()
except rospy.ROSInterruptException:
pass
|
262bfb89311b51ce2de74271c4717282d53cedc6
|
sandbox/sandbox/urls.py
|
sandbox/sandbox/urls.py
|
from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
urlpatterns = patterns('',
url(r'', include(shop.urls)),
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
urlpatterns = patterns('',
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(shop.urls)),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Switch order of store URLs
|
Switch order of store URLs
|
Python
|
bsd-3-clause
|
django-oscar/django-oscar-stores,django-oscar/django-oscar-stores,django-oscar/django-oscar-stores
|
from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
urlpatterns = patterns('',
url(r'', include(shop.urls)),
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Switch order of store URLs
|
from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
urlpatterns = patterns('',
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(shop.urls)),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<commit_before>from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
urlpatterns = patterns('',
url(r'', include(shop.urls)),
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Switch order of store URLs<commit_after>
|
from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
urlpatterns = patterns('',
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(shop.urls)),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
urlpatterns = patterns('',
url(r'', include(shop.urls)),
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Switch order of store URLsfrom django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
urlpatterns = patterns('',
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(shop.urls)),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<commit_before>from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
urlpatterns = patterns('',
url(r'', include(shop.urls)),
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Switch order of store URLs<commit_after>from django.conf import settings
from django.contrib import admin
from django.conf.urls.static import static
from django.conf.urls import patterns, include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from oscar.app import shop
from stores.app import application as stores_app
from stores.dashboard.app import application as dashboard_app
admin.autodiscover()
urlpatterns = patterns('',
url(r'^dashboard/stores/', include(dashboard_app.urls)),
url(r'^stores/', include(stores_app.urls)),
url(r'^', include(shop.urls)),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
f9572834538d40f5ae58e2f611fed7bf22af6311
|
templatemailer/mailer.py
|
templatemailer/mailer.py
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def email_user(user, template, context, attachments=None, delete_attachments_after_send=False, send_to=None,
language_code=None):
'''
Send email to user
:param user: User instance or None if no DB user is used
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param send_to: email address to send (or None, to use user email address)
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
send_to=send_to,
language_code=language_code
)
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
:param user: User instance or recipient email addres
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
Rename email_user method to send_email and fix parameters
|
Rename email_user method to send_email and fix parameters
|
Python
|
mit
|
tuomasjaanu/django-templatemailer
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def email_user(user, template, context, attachments=None, delete_attachments_after_send=False, send_to=None,
language_code=None):
'''
Send email to user
:param user: User instance or None if no DB user is used
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param send_to: email address to send (or None, to use user email address)
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
send_to=send_to,
language_code=language_code
)
Rename email_user method to send_email and fix parameters
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
:param user: User instance or recipient email addres
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
<commit_before>import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def email_user(user, template, context, attachments=None, delete_attachments_after_send=False, send_to=None,
language_code=None):
'''
Send email to user
:param user: User instance or None if no DB user is used
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param send_to: email address to send (or None, to use user email address)
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
send_to=send_to,
language_code=language_code
)
<commit_msg>Rename email_user method to send_email and fix parameters<commit_after>
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
:param user: User instance or recipient email addres
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def email_user(user, template, context, attachments=None, delete_attachments_after_send=False, send_to=None,
language_code=None):
'''
Send email to user
:param user: User instance or None if no DB user is used
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param send_to: email address to send (or None, to use user email address)
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
send_to=send_to,
language_code=language_code
)
Rename email_user method to send_email and fix parametersimport logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
:param user: User instance or recipient email addres
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
<commit_before>import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def email_user(user, template, context, attachments=None, delete_attachments_after_send=False, send_to=None,
language_code=None):
'''
Send email to user
:param user: User instance or None if no DB user is used
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param send_to: email address to send (or None, to use user email address)
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
send_to=send_to,
language_code=language_code
)
<commit_msg>Rename email_user method to send_email and fix parameters<commit_after>import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
:param user: User instance or recipient email addres
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
4b34053ce422e9be15e0ac386a591f8052a778b1
|
vcs/models.py
|
vcs/models.py
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
from timetracker.tracker.models import Tbluser
user = models.ManyToManyField(
Tbluser,
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
Use the app string version of foreign keying. It prevents a circular import.
|
Use the app string version of foreign keying. It prevents a circular import.
|
Python
|
bsd-3-clause
|
AeroNotix/django-timetracker,AeroNotix/django-timetracker,AeroNotix/django-timetracker
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
from timetracker.tracker.models import Tbluser
user = models.ManyToManyField(
Tbluser,
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
Use the app string version of foreign keying. It prevents a circular import.
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
<commit_before>from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
from timetracker.tracker.models import Tbluser
user = models.ManyToManyField(
Tbluser,
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
<commit_msg>Use the app string version of foreign keying. It prevents a circular import.<commit_after>
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
from timetracker.tracker.models import Tbluser
user = models.ManyToManyField(
Tbluser,
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
Use the app string version of foreign keying. It prevents a circular import.from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
<commit_before>from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
from timetracker.tracker.models import Tbluser
user = models.ManyToManyField(
Tbluser,
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
<commit_msg>Use the app string version of foreign keying. It prevents a circular import.<commit_after>from django.db import models
class Activity(models.Model):
group = models.CharField(max_length=4)
grouptype = models.TextField()
groupdetail = models.TextField()
details = models.TextField()
disabled = models.BooleanField()
time = models.DecimalField(decimal_places=2, max_digits=10)
unique_together = (("group", "grouptype", "disabled", "time"),)
class ActivityEntry(models.Model):
user = models.ManyToManyField(
'tracker.Tbluser',
related_name="user_foreign"
)
activity = models.ManyToManyField(
Activity,
related_name="activity_foreign"
)
amount = models.BigIntegerField()
def time(self):
return self.activity.time * self.amount
|
eef628750711b8bb4b08eb5f913b731d76541ab1
|
shop_catalog/filters.py
|
shop_catalog/filters.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from shop_catalog.models import Product
class ProductParentListFilter(SimpleListFilter):
title = _('Parent')
parameter_name = 'parent'
def lookups(self, request, model_admin):
lookups = ()
for product in Product.objects.all():
if product.is_group:
lookups += (product.pk, product.get_name()),
return lookups
def queryset(self, request, queryset):
if self.value():
try:
return queryset.get(pk=self.value()).variants.all()
except Product.DoesNotExist:
pass
return queryset
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import Q
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from shop_catalog.models import Product
class ProductParentListFilter(SimpleListFilter):
title = _('Parent')
parameter_name = 'parent'
def lookups(self, request, model_admin):
lookups = ()
for product in Product.objects.all():
if product.is_group:
lookups += (product.pk, product.get_name()),
return lookups
def queryset(self, request, queryset):
if self.value():
queryset = queryset.filter(
Q(pk=self.value()) | Q(parent_id=self.value()))
return queryset
|
Modify parent filter to return variants and self
|
Modify parent filter to return variants and self
|
Python
|
bsd-3-clause
|
dinoperovic/django-shop-catalog,dinoperovic/django-shop-catalog
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from shop_catalog.models import Product
class ProductParentListFilter(SimpleListFilter):
title = _('Parent')
parameter_name = 'parent'
def lookups(self, request, model_admin):
lookups = ()
for product in Product.objects.all():
if product.is_group:
lookups += (product.pk, product.get_name()),
return lookups
def queryset(self, request, queryset):
if self.value():
try:
return queryset.get(pk=self.value()).variants.all()
except Product.DoesNotExist:
pass
return queryset
Modify parent filter to return variants and self
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import Q
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from shop_catalog.models import Product
class ProductParentListFilter(SimpleListFilter):
title = _('Parent')
parameter_name = 'parent'
def lookups(self, request, model_admin):
lookups = ()
for product in Product.objects.all():
if product.is_group:
lookups += (product.pk, product.get_name()),
return lookups
def queryset(self, request, queryset):
if self.value():
queryset = queryset.filter(
Q(pk=self.value()) | Q(parent_id=self.value()))
return queryset
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from shop_catalog.models import Product
class ProductParentListFilter(SimpleListFilter):
title = _('Parent')
parameter_name = 'parent'
def lookups(self, request, model_admin):
lookups = ()
for product in Product.objects.all():
if product.is_group:
lookups += (product.pk, product.get_name()),
return lookups
def queryset(self, request, queryset):
if self.value():
try:
return queryset.get(pk=self.value()).variants.all()
except Product.DoesNotExist:
pass
return queryset
<commit_msg>Modify parent filter to return variants and self<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import Q
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from shop_catalog.models import Product
class ProductParentListFilter(SimpleListFilter):
title = _('Parent')
parameter_name = 'parent'
def lookups(self, request, model_admin):
lookups = ()
for product in Product.objects.all():
if product.is_group:
lookups += (product.pk, product.get_name()),
return lookups
def queryset(self, request, queryset):
if self.value():
queryset = queryset.filter(
Q(pk=self.value()) | Q(parent_id=self.value()))
return queryset
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from shop_catalog.models import Product
class ProductParentListFilter(SimpleListFilter):
title = _('Parent')
parameter_name = 'parent'
def lookups(self, request, model_admin):
lookups = ()
for product in Product.objects.all():
if product.is_group:
lookups += (product.pk, product.get_name()),
return lookups
def queryset(self, request, queryset):
if self.value():
try:
return queryset.get(pk=self.value()).variants.all()
except Product.DoesNotExist:
pass
return queryset
Modify parent filter to return variants and self# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import Q
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from shop_catalog.models import Product
class ProductParentListFilter(SimpleListFilter):
title = _('Parent')
parameter_name = 'parent'
def lookups(self, request, model_admin):
lookups = ()
for product in Product.objects.all():
if product.is_group:
lookups += (product.pk, product.get_name()),
return lookups
def queryset(self, request, queryset):
if self.value():
queryset = queryset.filter(
Q(pk=self.value()) | Q(parent_id=self.value()))
return queryset
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from shop_catalog.models import Product
class ProductParentListFilter(SimpleListFilter):
title = _('Parent')
parameter_name = 'parent'
def lookups(self, request, model_admin):
lookups = ()
for product in Product.objects.all():
if product.is_group:
lookups += (product.pk, product.get_name()),
return lookups
def queryset(self, request, queryset):
if self.value():
try:
return queryset.get(pk=self.value()).variants.all()
except Product.DoesNotExist:
pass
return queryset
<commit_msg>Modify parent filter to return variants and self<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db.models import Q
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _
from shop_catalog.models import Product
class ProductParentListFilter(SimpleListFilter):
title = _('Parent')
parameter_name = 'parent'
def lookups(self, request, model_admin):
lookups = ()
for product in Product.objects.all():
if product.is_group:
lookups += (product.pk, product.get_name()),
return lookups
def queryset(self, request, queryset):
if self.value():
queryset = queryset.filter(
Q(pk=self.value()) | Q(parent_id=self.value()))
return queryset
|
ae201ae3c8b3f25f96bea55f9a69c3612a74c7cf
|
inboxen/tests/settings.py
|
inboxen/tests/settings.py
|
from __future__ import absolute_import
import os
os.environ['INBOX_TESTING'] = '1'
from settings import *
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache"
}
}
db = os.environ.get('DB')
SECRET_KEY = "This is a test, you don't need secrets"
if db == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
elif db == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'inboxen',
'USER': 'postgres',
},
}
else:
raise NotImplementedError("Please check tests/settings.py for valid DB values")
|
from __future__ import absolute_import
import os
os.environ['INBOX_TESTING'] = '1'
from settings import *
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache"
}
}
db = os.environ.get('DB')
SECRET_KEY = "This is a test, you don't need secrets"
if db == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
elif db == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'inboxen',
'USER': 'postgres',
},
}
else:
raise NotImplementedError("Please check tests/settings.py for valid DB values")
|
Use local memory as cache so ratelimit tests don't fail
|
Use local memory as cache so ratelimit tests don't fail
|
Python
|
agpl-3.0
|
Inboxen/Inboxen,Inboxen/infrastructure,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen
|
from __future__ import absolute_import
import os
os.environ['INBOX_TESTING'] = '1'
from settings import *
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache"
}
}
db = os.environ.get('DB')
SECRET_KEY = "This is a test, you don't need secrets"
if db == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
elif db == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'inboxen',
'USER': 'postgres',
},
}
else:
raise NotImplementedError("Please check tests/settings.py for valid DB values")
Use local memory as cache so ratelimit tests don't fail
|
from __future__ import absolute_import
import os
os.environ['INBOX_TESTING'] = '1'
from settings import *
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache"
}
}
db = os.environ.get('DB')
SECRET_KEY = "This is a test, you don't need secrets"
if db == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
elif db == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'inboxen',
'USER': 'postgres',
},
}
else:
raise NotImplementedError("Please check tests/settings.py for valid DB values")
|
<commit_before>from __future__ import absolute_import
import os
os.environ['INBOX_TESTING'] = '1'
from settings import *
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache"
}
}
db = os.environ.get('DB')
SECRET_KEY = "This is a test, you don't need secrets"
if db == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
elif db == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'inboxen',
'USER': 'postgres',
},
}
else:
raise NotImplementedError("Please check tests/settings.py for valid DB values")
<commit_msg>Use local memory as cache so ratelimit tests don't fail<commit_after>
|
from __future__ import absolute_import
import os
os.environ['INBOX_TESTING'] = '1'
from settings import *
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache"
}
}
db = os.environ.get('DB')
SECRET_KEY = "This is a test, you don't need secrets"
if db == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
elif db == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'inboxen',
'USER': 'postgres',
},
}
else:
raise NotImplementedError("Please check tests/settings.py for valid DB values")
|
from __future__ import absolute_import
import os
os.environ['INBOX_TESTING'] = '1'
from settings import *
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache"
}
}
db = os.environ.get('DB')
SECRET_KEY = "This is a test, you don't need secrets"
if db == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
elif db == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'inboxen',
'USER': 'postgres',
},
}
else:
raise NotImplementedError("Please check tests/settings.py for valid DB values")
Use local memory as cache so ratelimit tests don't failfrom __future__ import absolute_import
import os
os.environ['INBOX_TESTING'] = '1'
from settings import *
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache"
}
}
db = os.environ.get('DB')
SECRET_KEY = "This is a test, you don't need secrets"
if db == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
elif db == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'inboxen',
'USER': 'postgres',
},
}
else:
raise NotImplementedError("Please check tests/settings.py for valid DB values")
|
<commit_before>from __future__ import absolute_import
import os
os.environ['INBOX_TESTING'] = '1'
from settings import *
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.dummy.DummyCache"
}
}
db = os.environ.get('DB')
SECRET_KEY = "This is a test, you don't need secrets"
if db == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
elif db == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'inboxen',
'USER': 'postgres',
},
}
else:
raise NotImplementedError("Please check tests/settings.py for valid DB values")
<commit_msg>Use local memory as cache so ratelimit tests don't fail<commit_after>from __future__ import absolute_import
import os
os.environ['INBOX_TESTING'] = '1'
from settings import *
CACHES = {
"default": {
"BACKEND": "django.core.cache.backends.locmem.LocMemCache"
}
}
db = os.environ.get('DB')
SECRET_KEY = "This is a test, you don't need secrets"
if db == "sqlite":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
},
}
elif db == "postgres":
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'inboxen',
'USER': 'postgres',
},
}
else:
raise NotImplementedError("Please check tests/settings.py for valid DB values")
|
371fb9b90d452f8893446e4968659d2e1ff58676
|
mopidy/backends/spotify/container_manager.py
|
mopidy/backends/spotify/container_manager.py
|
import logging
from spotify.manager import SpotifyContainerManager as \
PyspotifyContainerManager
logger = logging.getLogger('mopidy.backends.spotify.container_manager')
class SpotifyContainerManager(PyspotifyContainerManager):
def __init__(self, session_manager):
PyspotifyContainerManager.__init__(self)
self.session_manager = session_manager
def container_loaded(self, container, userdata):
"""Callback used by pyspotify."""
logger.debug(u'Container loaded')
self.session_manager.refresh_stored_playlists()
|
import logging
from spotify.manager import SpotifyContainerManager as \
PyspotifyContainerManager
logger = logging.getLogger('mopidy.backends.spotify.container_manager')
class SpotifyContainerManager(PyspotifyContainerManager):
def __init__(self, session_manager):
PyspotifyContainerManager.__init__(self)
self.session_manager = session_manager
def container_loaded(self, container, userdata):
"""Callback used by pyspotify"""
logger.debug(u'Callback called: playlist container loaded')
self.session_manager.refresh_stored_playlists()
def playlist_added(self, container, playlist, position, userdata):
"""Callback used by pyspotify"""
logger.debug(u'Callback called: playlist "%s" added at position %d',
playlist.name(), position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
def playlist_moved(self, container, playlist, old_position, new_position,
userdata):
"""Callback used by pyspotify"""
logger.debug(
u'Callback called: playlist "%s" moved from position %d to %d',
playlist.name(), old_position, new_position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
def playlist_removed(self, container, playlist, position, userdata):
"""Callback used by pyspotify"""
logger.debug(
u'Callback called: playlist "%s" removed from position %d',
playlist.name(), position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
|
Add missing container callbacks with debug log statements
|
Add missing container callbacks with debug log statements
|
Python
|
apache-2.0
|
swak/mopidy,dbrgn/mopidy,rawdlite/mopidy,jmarsik/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,quartz55/mopidy,bacontext/mopidy,dbrgn/mopidy,pacificIT/mopidy,diandiankan/mopidy,mokieyue/mopidy,swak/mopidy,jodal/mopidy,mokieyue/mopidy,tkem/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,ali/mopidy,mokieyue/mopidy,kingosticks/mopidy,tkem/mopidy,abarisain/mopidy,mopidy/mopidy,jmarsik/mopidy,vrs01/mopidy,glogiotatidis/mopidy,bencevans/mopidy,adamcik/mopidy,adamcik/mopidy,rawdlite/mopidy,hkariti/mopidy,swak/mopidy,ali/mopidy,hkariti/mopidy,glogiotatidis/mopidy,liamw9534/mopidy,liamw9534/mopidy,quartz55/mopidy,glogiotatidis/mopidy,vrs01/mopidy,priestd09/mopidy,tkem/mopidy,pacificIT/mopidy,priestd09/mopidy,dbrgn/mopidy,abarisain/mopidy,dbrgn/mopidy,ZenithDK/mopidy,diandiankan/mopidy,mokieyue/mopidy,bacontext/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,ZenithDK/mopidy,diandiankan/mopidy,jmarsik/mopidy,vrs01/mopidy,rawdlite/mopidy,ZenithDK/mopidy,rawdlite/mopidy,jodal/mopidy,ali/mopidy,mopidy/mopidy,jcass77/mopidy,SuperStarPL/mopidy,jcass77/mopidy,woutervanwijk/mopidy,bacontext/mopidy,jcass77/mopidy,woutervanwijk/mopidy,ali/mopidy,tkem/mopidy,bencevans/mopidy,bacontext/mopidy,priestd09/mopidy,bencevans/mopidy,hkariti/mopidy,diandiankan/mopidy,vrs01/mopidy,adamcik/mopidy,mopidy/mopidy,pacificIT/mopidy,hkariti/mopidy,jodal/mopidy,quartz55/mopidy,jmarsik/mopidy,swak/mopidy,bencevans/mopidy,quartz55/mopidy,kingosticks/mopidy
|
import logging
from spotify.manager import SpotifyContainerManager as \
PyspotifyContainerManager
logger = logging.getLogger('mopidy.backends.spotify.container_manager')
class SpotifyContainerManager(PyspotifyContainerManager):
def __init__(self, session_manager):
PyspotifyContainerManager.__init__(self)
self.session_manager = session_manager
def container_loaded(self, container, userdata):
"""Callback used by pyspotify."""
logger.debug(u'Container loaded')
self.session_manager.refresh_stored_playlists()
Add missing container callbacks with debug log statements
|
import logging
from spotify.manager import SpotifyContainerManager as \
PyspotifyContainerManager
logger = logging.getLogger('mopidy.backends.spotify.container_manager')
class SpotifyContainerManager(PyspotifyContainerManager):
def __init__(self, session_manager):
PyspotifyContainerManager.__init__(self)
self.session_manager = session_manager
def container_loaded(self, container, userdata):
"""Callback used by pyspotify"""
logger.debug(u'Callback called: playlist container loaded')
self.session_manager.refresh_stored_playlists()
def playlist_added(self, container, playlist, position, userdata):
"""Callback used by pyspotify"""
logger.debug(u'Callback called: playlist "%s" added at position %d',
playlist.name(), position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
def playlist_moved(self, container, playlist, old_position, new_position,
userdata):
"""Callback used by pyspotify"""
logger.debug(
u'Callback called: playlist "%s" moved from position %d to %d',
playlist.name(), old_position, new_position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
def playlist_removed(self, container, playlist, position, userdata):
"""Callback used by pyspotify"""
logger.debug(
u'Callback called: playlist "%s" removed from position %d',
playlist.name(), position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
|
<commit_before>import logging
from spotify.manager import SpotifyContainerManager as \
PyspotifyContainerManager
logger = logging.getLogger('mopidy.backends.spotify.container_manager')
class SpotifyContainerManager(PyspotifyContainerManager):
def __init__(self, session_manager):
PyspotifyContainerManager.__init__(self)
self.session_manager = session_manager
def container_loaded(self, container, userdata):
"""Callback used by pyspotify."""
logger.debug(u'Container loaded')
self.session_manager.refresh_stored_playlists()
<commit_msg>Add missing container callbacks with debug log statements<commit_after>
|
import logging
from spotify.manager import SpotifyContainerManager as \
PyspotifyContainerManager
logger = logging.getLogger('mopidy.backends.spotify.container_manager')
class SpotifyContainerManager(PyspotifyContainerManager):
def __init__(self, session_manager):
PyspotifyContainerManager.__init__(self)
self.session_manager = session_manager
def container_loaded(self, container, userdata):
"""Callback used by pyspotify"""
logger.debug(u'Callback called: playlist container loaded')
self.session_manager.refresh_stored_playlists()
def playlist_added(self, container, playlist, position, userdata):
"""Callback used by pyspotify"""
logger.debug(u'Callback called: playlist "%s" added at position %d',
playlist.name(), position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
def playlist_moved(self, container, playlist, old_position, new_position,
userdata):
"""Callback used by pyspotify"""
logger.debug(
u'Callback called: playlist "%s" moved from position %d to %d',
playlist.name(), old_position, new_position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
def playlist_removed(self, container, playlist, position, userdata):
"""Callback used by pyspotify"""
logger.debug(
u'Callback called: playlist "%s" removed from position %d',
playlist.name(), position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
|
import logging
from spotify.manager import SpotifyContainerManager as \
PyspotifyContainerManager
logger = logging.getLogger('mopidy.backends.spotify.container_manager')
class SpotifyContainerManager(PyspotifyContainerManager):
def __init__(self, session_manager):
PyspotifyContainerManager.__init__(self)
self.session_manager = session_manager
def container_loaded(self, container, userdata):
"""Callback used by pyspotify."""
logger.debug(u'Container loaded')
self.session_manager.refresh_stored_playlists()
Add missing container callbacks with debug log statementsimport logging
from spotify.manager import SpotifyContainerManager as \
PyspotifyContainerManager
logger = logging.getLogger('mopidy.backends.spotify.container_manager')
class SpotifyContainerManager(PyspotifyContainerManager):
def __init__(self, session_manager):
PyspotifyContainerManager.__init__(self)
self.session_manager = session_manager
def container_loaded(self, container, userdata):
"""Callback used by pyspotify"""
logger.debug(u'Callback called: playlist container loaded')
self.session_manager.refresh_stored_playlists()
def playlist_added(self, container, playlist, position, userdata):
"""Callback used by pyspotify"""
logger.debug(u'Callback called: playlist "%s" added at position %d',
playlist.name(), position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
def playlist_moved(self, container, playlist, old_position, new_position,
userdata):
"""Callback used by pyspotify"""
logger.debug(
u'Callback called: playlist "%s" moved from position %d to %d',
playlist.name(), old_position, new_position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
def playlist_removed(self, container, playlist, position, userdata):
"""Callback used by pyspotify"""
logger.debug(
u'Callback called: playlist "%s" removed from position %d',
playlist.name(), position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
|
<commit_before>import logging
from spotify.manager import SpotifyContainerManager as \
PyspotifyContainerManager
logger = logging.getLogger('mopidy.backends.spotify.container_manager')
class SpotifyContainerManager(PyspotifyContainerManager):
def __init__(self, session_manager):
PyspotifyContainerManager.__init__(self)
self.session_manager = session_manager
def container_loaded(self, container, userdata):
"""Callback used by pyspotify."""
logger.debug(u'Container loaded')
self.session_manager.refresh_stored_playlists()
<commit_msg>Add missing container callbacks with debug log statements<commit_after>import logging
from spotify.manager import SpotifyContainerManager as \
PyspotifyContainerManager
logger = logging.getLogger('mopidy.backends.spotify.container_manager')
class SpotifyContainerManager(PyspotifyContainerManager):
def __init__(self, session_manager):
PyspotifyContainerManager.__init__(self)
self.session_manager = session_manager
def container_loaded(self, container, userdata):
"""Callback used by pyspotify"""
logger.debug(u'Callback called: playlist container loaded')
self.session_manager.refresh_stored_playlists()
def playlist_added(self, container, playlist, position, userdata):
"""Callback used by pyspotify"""
logger.debug(u'Callback called: playlist "%s" added at position %d',
playlist.name(), position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
def playlist_moved(self, container, playlist, old_position, new_position,
userdata):
"""Callback used by pyspotify"""
logger.debug(
u'Callback called: playlist "%s" moved from position %d to %d',
playlist.name(), old_position, new_position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
def playlist_removed(self, container, playlist, position, userdata):
"""Callback used by pyspotify"""
logger.debug(
u'Callback called: playlist "%s" removed from position %d',
playlist.name(), position)
# container_loaded() is called after this callback, so we do not need
# to handle this callback.
|
758f59f9167bb49102679ad95d074bf22b4a62f4
|
fixedwidthwriter/__init__.py
|
fixedwidthwriter/__init__.py
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_endings='linux'):
self.fd = fd
self.fields = fields
if line_endings == 'linux':
self.line_endings = '\n'
elif line_endings == 'windows':
self.line_endings = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
Rename the 'line_ending' argument to 'line_endings'. Breaking change.
|
Rename the 'line_ending' argument to 'line_endings'. Breaking change.
|
Python
|
mit
|
HardDiskD/py-fixedwidthwriter,ArthurPBressan/py-fixedwidthwriter
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
Rename the 'line_ending' argument to 'line_endings'. Breaking change.
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_endings='linux'):
self.fd = fd
self.fields = fields
if line_endings == 'linux':
self.line_endings = '\n'
elif line_endings == 'windows':
self.line_endings = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
<commit_before># coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
<commit_msg>Rename the 'line_ending' argument to 'line_endings'. Breaking change.<commit_after>
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_endings='linux'):
self.fd = fd
self.fields = fields
if line_endings == 'linux':
self.line_endings = '\n'
elif line_endings == 'windows':
self.line_endings = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
Rename the 'line_ending' argument to 'line_endings'. Breaking change.# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_endings='linux'):
self.fd = fd
self.fields = fields
if line_endings == 'linux':
self.line_endings = '\n'
elif line_endings == 'windows':
self.line_endings = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
<commit_before># coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
<commit_msg>Rename the 'line_ending' argument to 'line_endings'. Breaking change.<commit_after># coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_endings='linux'):
self.fd = fd
self.fields = fields
if line_endings == 'linux':
self.line_endings = '\n'
elif line_endings == 'windows':
self.line_endings = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
4d95f634dd7f856fa4fbaf4a20bda58c01fa58b4
|
tests/test_epubcheck.py
|
tests/test_epubcheck.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import epubcheck
from epubcheck import samples
from epubcheck.cli import main
def test_valid():
assert epubcheck.validate(samples.EPUB3_VALID)
def test_invalid():
assert not epubcheck.validate(samples.EPUB3_INVALID)
def test_main_valid(capsys):
argv = [samples.EPUB3_VALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' not in out and 'ERROR' not in err
assert exit_code == 0
def test_main_invalid(capsys):
argv = [samples.EPUB3_INVALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' in err and 'WARNING' in out
assert exit_code == 1
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
import pytest
import tablib
import epubcheck
from epubcheck import samples
from epubcheck.cli import main
def test_valid():
assert epubcheck.validate(samples.EPUB3_VALID)
def test_invalid():
assert not epubcheck.validate(samples.EPUB3_INVALID)
def test_main_valid(capsys):
argv = [samples.EPUB3_VALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' not in out and 'ERROR' not in err
assert exit_code == 0
def test_main_invalid(capsys):
argv = [samples.EPUB3_INVALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' in err and 'WARNING' in out
assert exit_code == 1
def test_csv_report(tmp_path):
results_file = tmp_path / 'results.csv'
main([samples.EPUB3_INVALID, '--csv', str(results_file)])
with results_file.open('r') as f:
dataset = tablib.Dataset().load(f.read(), format='csv', delimiter=';')
assert dataset[0][:3] == ('OPF-003', 'WARNING', 'invalid.epub')
def test_xls_report(tmp_path):
results_file = tmp_path / 'results.xls'
main([samples.EPUB3_INVALID, '--xls', str(results_file)])
with results_file.open('rb') as f:
databook = tablib.Databook().load(f.read(), format='xls')
assert databook.sheets()[1][0][:3] == ('OPF-003', 'WARNING', 'invalid.epub')
|
Add tests for CSV and XLS reporting
|
Add tests for CSV and XLS reporting
CSV export currently fails due to epubcheck passing the delimiting
character as bytes although Tablib expects it to be of type str.
The issue will not be fixed as Python 2 has reached end of life [1].
[1] https://github.com/jazzband/tablib/issues/369
|
Python
|
bsd-2-clause
|
titusz/epubcheck
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import epubcheck
from epubcheck import samples
from epubcheck.cli import main
def test_valid():
assert epubcheck.validate(samples.EPUB3_VALID)
def test_invalid():
assert not epubcheck.validate(samples.EPUB3_INVALID)
def test_main_valid(capsys):
argv = [samples.EPUB3_VALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' not in out and 'ERROR' not in err
assert exit_code == 0
def test_main_invalid(capsys):
argv = [samples.EPUB3_INVALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' in err and 'WARNING' in out
assert exit_code == 1
Add tests for CSV and XLS reporting
CSV export currently fails due to epubcheck passing the delimiting
character as bytes although Tablib expects it to be of type str.
The issue will not be fixed as Python 2 has reached end of life [1].
[1] https://github.com/jazzband/tablib/issues/369
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
import pytest
import tablib
import epubcheck
from epubcheck import samples
from epubcheck.cli import main
def test_valid():
assert epubcheck.validate(samples.EPUB3_VALID)
def test_invalid():
assert not epubcheck.validate(samples.EPUB3_INVALID)
def test_main_valid(capsys):
argv = [samples.EPUB3_VALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' not in out and 'ERROR' not in err
assert exit_code == 0
def test_main_invalid(capsys):
argv = [samples.EPUB3_INVALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' in err and 'WARNING' in out
assert exit_code == 1
def test_csv_report(tmp_path):
results_file = tmp_path / 'results.csv'
main([samples.EPUB3_INVALID, '--csv', str(results_file)])
with results_file.open('r') as f:
dataset = tablib.Dataset().load(f.read(), format='csv', delimiter=';')
assert dataset[0][:3] == ('OPF-003', 'WARNING', 'invalid.epub')
def test_xls_report(tmp_path):
results_file = tmp_path / 'results.xls'
main([samples.EPUB3_INVALID, '--xls', str(results_file)])
with results_file.open('rb') as f:
databook = tablib.Databook().load(f.read(), format='xls')
assert databook.sheets()[1][0][:3] == ('OPF-003', 'WARNING', 'invalid.epub')
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import epubcheck
from epubcheck import samples
from epubcheck.cli import main
def test_valid():
assert epubcheck.validate(samples.EPUB3_VALID)
def test_invalid():
assert not epubcheck.validate(samples.EPUB3_INVALID)
def test_main_valid(capsys):
argv = [samples.EPUB3_VALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' not in out and 'ERROR' not in err
assert exit_code == 0
def test_main_invalid(capsys):
argv = [samples.EPUB3_INVALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' in err and 'WARNING' in out
assert exit_code == 1
<commit_msg>Add tests for CSV and XLS reporting
CSV export currently fails due to epubcheck passing the delimiting
character as bytes although Tablib expects it to be of type str.
The issue will not be fixed as Python 2 has reached end of life [1].
[1] https://github.com/jazzband/tablib/issues/369<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
import pytest
import tablib
import epubcheck
from epubcheck import samples
from epubcheck.cli import main
def test_valid():
assert epubcheck.validate(samples.EPUB3_VALID)
def test_invalid():
assert not epubcheck.validate(samples.EPUB3_INVALID)
def test_main_valid(capsys):
argv = [samples.EPUB3_VALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' not in out and 'ERROR' not in err
assert exit_code == 0
def test_main_invalid(capsys):
argv = [samples.EPUB3_INVALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' in err and 'WARNING' in out
assert exit_code == 1
def test_csv_report(tmp_path):
results_file = tmp_path / 'results.csv'
main([samples.EPUB3_INVALID, '--csv', str(results_file)])
with results_file.open('r') as f:
dataset = tablib.Dataset().load(f.read(), format='csv', delimiter=';')
assert dataset[0][:3] == ('OPF-003', 'WARNING', 'invalid.epub')
def test_xls_report(tmp_path):
results_file = tmp_path / 'results.xls'
main([samples.EPUB3_INVALID, '--xls', str(results_file)])
with results_file.open('rb') as f:
databook = tablib.Databook().load(f.read(), format='xls')
assert databook.sheets()[1][0][:3] == ('OPF-003', 'WARNING', 'invalid.epub')
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import epubcheck
from epubcheck import samples
from epubcheck.cli import main
def test_valid():
assert epubcheck.validate(samples.EPUB3_VALID)
def test_invalid():
assert not epubcheck.validate(samples.EPUB3_INVALID)
def test_main_valid(capsys):
argv = [samples.EPUB3_VALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' not in out and 'ERROR' not in err
assert exit_code == 0
def test_main_invalid(capsys):
argv = [samples.EPUB3_INVALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' in err and 'WARNING' in out
assert exit_code == 1
Add tests for CSV and XLS reporting
CSV export currently fails due to epubcheck passing the delimiting
character as bytes although Tablib expects it to be of type str.
The issue will not be fixed as Python 2 has reached end of life [1].
[1] https://github.com/jazzband/tablib/issues/369# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
import pytest
import tablib
import epubcheck
from epubcheck import samples
from epubcheck.cli import main
def test_valid():
assert epubcheck.validate(samples.EPUB3_VALID)
def test_invalid():
assert not epubcheck.validate(samples.EPUB3_INVALID)
def test_main_valid(capsys):
argv = [samples.EPUB3_VALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' not in out and 'ERROR' not in err
assert exit_code == 0
def test_main_invalid(capsys):
argv = [samples.EPUB3_INVALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' in err and 'WARNING' in out
assert exit_code == 1
def test_csv_report(tmp_path):
results_file = tmp_path / 'results.csv'
main([samples.EPUB3_INVALID, '--csv', str(results_file)])
with results_file.open('r') as f:
dataset = tablib.Dataset().load(f.read(), format='csv', delimiter=';')
assert dataset[0][:3] == ('OPF-003', 'WARNING', 'invalid.epub')
def test_xls_report(tmp_path):
results_file = tmp_path / 'results.xls'
main([samples.EPUB3_INVALID, '--xls', str(results_file)])
with results_file.open('rb') as f:
databook = tablib.Databook().load(f.read(), format='xls')
assert databook.sheets()[1][0][:3] == ('OPF-003', 'WARNING', 'invalid.epub')
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import epubcheck
from epubcheck import samples
from epubcheck.cli import main
def test_valid():
assert epubcheck.validate(samples.EPUB3_VALID)
def test_invalid():
assert not epubcheck.validate(samples.EPUB3_INVALID)
def test_main_valid(capsys):
argv = [samples.EPUB3_VALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' not in out and 'ERROR' not in err
assert exit_code == 0
def test_main_invalid(capsys):
argv = [samples.EPUB3_INVALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' in err and 'WARNING' in out
assert exit_code == 1
<commit_msg>Add tests for CSV and XLS reporting
CSV export currently fails due to epubcheck passing the delimiting
character as bytes although Tablib expects it to be of type str.
The issue will not be fixed as Python 2 has reached end of life [1].
[1] https://github.com/jazzband/tablib/issues/369<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
import pytest
import tablib
import epubcheck
from epubcheck import samples
from epubcheck.cli import main
def test_valid():
assert epubcheck.validate(samples.EPUB3_VALID)
def test_invalid():
assert not epubcheck.validate(samples.EPUB3_INVALID)
def test_main_valid(capsys):
argv = [samples.EPUB3_VALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' not in out and 'ERROR' not in err
assert exit_code == 0
def test_main_invalid(capsys):
argv = [samples.EPUB3_INVALID]
exit_code = main(argv)
out, err = capsys.readouterr()
assert 'ERROR' in err and 'WARNING' in out
assert exit_code == 1
def test_csv_report(tmp_path):
results_file = tmp_path / 'results.csv'
main([samples.EPUB3_INVALID, '--csv', str(results_file)])
with results_file.open('r') as f:
dataset = tablib.Dataset().load(f.read(), format='csv', delimiter=';')
assert dataset[0][:3] == ('OPF-003', 'WARNING', 'invalid.epub')
def test_xls_report(tmp_path):
results_file = tmp_path / 'results.xls'
main([samples.EPUB3_INVALID, '--xls', str(results_file)])
with results_file.open('rb') as f:
databook = tablib.Databook().load(f.read(), format='xls')
assert databook.sheets()[1][0][:3] == ('OPF-003', 'WARNING', 'invalid.epub')
|
e16a292d027f07c1f425229bdb8b74e0d2c66e1f
|
aws_roleshell.py
|
aws_roleshell.py
|
import argparse
from awscli.customizations.commands import BasicCommand
import os
def awscli_initialize(event_hooks):
event_hooks.register('building-command-table.main', inject_commands)
def inject_commands(command_table, session, **kwargs):
command_table['roleshell'] = RoleShell(session)
def get_exec_args(input_command):
if len(input_command) == 0:
input_command = (os.environ['SHELL'],)
return (input_command[0], input_command)
class RoleShell(BasicCommand):
NAME = 'roleshell'
DESCRIPTION = ('Executes a shell with temporary AWS credentials provided as environment variables')
ARG_TABLE = [
dict(name='command', nargs=argparse.REMAINDER, positional_arg=True),
]
def _run_main(self, args, parsed_globals):
a = self._session.get_credentials()
os.environ['AWS_ACCESS_KEY_ID'] = a.access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = a.secret_key
os.environ['AWS_SESSION_TOKEN'] = a.token
os.execvp(*get_exec_args(args.command))
|
import argparse
import os
import shlex
import textwrap
from awscli.customizations.commands import BasicCommand
def awscli_initialize(event_hooks):
event_hooks.register('building-command-table.main', inject_commands)
def inject_commands(command_table, session, **kwargs):
command_table['roleshell'] = RoleShell(session)
def print_creds(creds):
quoted_vars = map(shlex.quote, (creds.access_key,
creds.secret_key, creds.token))
print(textwrap.dedent("""\
export AWS_ACCESS_KEY_ID={}
export AWS_SECRET_ACCESS_KEY={}
export AWS_SESSION_TOKEN={}\
""".format(*quoted_vars)))
def get_exec_args(input_command):
if len(input_command) == 0:
input_command = (os.environ['SHELL'],)
return (input_command[0], input_command)
def run_command(creds, command):
os.environ['AWS_ACCESS_KEY_ID'] = creds.access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = creds.secret_key
os.environ['AWS_SESSION_TOKEN'] = creds.token
os.execvp(*get_exec_args(command))
class RoleShell(BasicCommand):
NAME = 'roleshell'
DESCRIPTION = (
'Executes a shell with temporary AWS credentials provided as environment variables')
ARG_TABLE = [
dict(name='command', nargs=argparse.REMAINDER, positional_arg=True),
]
def _run_main(self, args, parsed_globals):
c = self._session.get_credentials()
if len(args.command) == 0:
print_creds(c)
else:
run_command(c, args.command)
|
Make it possible to eval() output instead of execing another shell.
|
Make it possible to eval() output instead of execing another shell.
|
Python
|
mit
|
hashbrowncipher/aws-roleshell
|
import argparse
from awscli.customizations.commands import BasicCommand
import os
def awscli_initialize(event_hooks):
event_hooks.register('building-command-table.main', inject_commands)
def inject_commands(command_table, session, **kwargs):
command_table['roleshell'] = RoleShell(session)
def get_exec_args(input_command):
if len(input_command) == 0:
input_command = (os.environ['SHELL'],)
return (input_command[0], input_command)
class RoleShell(BasicCommand):
NAME = 'roleshell'
DESCRIPTION = ('Executes a shell with temporary AWS credentials provided as environment variables')
ARG_TABLE = [
dict(name='command', nargs=argparse.REMAINDER, positional_arg=True),
]
def _run_main(self, args, parsed_globals):
a = self._session.get_credentials()
os.environ['AWS_ACCESS_KEY_ID'] = a.access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = a.secret_key
os.environ['AWS_SESSION_TOKEN'] = a.token
os.execvp(*get_exec_args(args.command))
Make it possible to eval() output instead of execing another shell.
|
import argparse
import os
import shlex
import textwrap
from awscli.customizations.commands import BasicCommand
def awscli_initialize(event_hooks):
event_hooks.register('building-command-table.main', inject_commands)
def inject_commands(command_table, session, **kwargs):
command_table['roleshell'] = RoleShell(session)
def print_creds(creds):
quoted_vars = map(shlex.quote, (creds.access_key,
creds.secret_key, creds.token))
print(textwrap.dedent("""\
export AWS_ACCESS_KEY_ID={}
export AWS_SECRET_ACCESS_KEY={}
export AWS_SESSION_TOKEN={}\
""".format(*quoted_vars)))
def get_exec_args(input_command):
if len(input_command) == 0:
input_command = (os.environ['SHELL'],)
return (input_command[0], input_command)
def run_command(creds, command):
os.environ['AWS_ACCESS_KEY_ID'] = creds.access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = creds.secret_key
os.environ['AWS_SESSION_TOKEN'] = creds.token
os.execvp(*get_exec_args(command))
class RoleShell(BasicCommand):
NAME = 'roleshell'
DESCRIPTION = (
'Executes a shell with temporary AWS credentials provided as environment variables')
ARG_TABLE = [
dict(name='command', nargs=argparse.REMAINDER, positional_arg=True),
]
def _run_main(self, args, parsed_globals):
c = self._session.get_credentials()
if len(args.command) == 0:
print_creds(c)
else:
run_command(c, args.command)
|
<commit_before>import argparse
from awscli.customizations.commands import BasicCommand
import os
def awscli_initialize(event_hooks):
event_hooks.register('building-command-table.main', inject_commands)
def inject_commands(command_table, session, **kwargs):
command_table['roleshell'] = RoleShell(session)
def get_exec_args(input_command):
if len(input_command) == 0:
input_command = (os.environ['SHELL'],)
return (input_command[0], input_command)
class RoleShell(BasicCommand):
NAME = 'roleshell'
DESCRIPTION = ('Executes a shell with temporary AWS credentials provided as environment variables')
ARG_TABLE = [
dict(name='command', nargs=argparse.REMAINDER, positional_arg=True),
]
def _run_main(self, args, parsed_globals):
a = self._session.get_credentials()
os.environ['AWS_ACCESS_KEY_ID'] = a.access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = a.secret_key
os.environ['AWS_SESSION_TOKEN'] = a.token
os.execvp(*get_exec_args(args.command))
<commit_msg>Make it possible to eval() output instead of execing another shell.<commit_after>
|
import argparse
import os
import shlex
import textwrap
from awscli.customizations.commands import BasicCommand
def awscli_initialize(event_hooks):
event_hooks.register('building-command-table.main', inject_commands)
def inject_commands(command_table, session, **kwargs):
command_table['roleshell'] = RoleShell(session)
def print_creds(creds):
quoted_vars = map(shlex.quote, (creds.access_key,
creds.secret_key, creds.token))
print(textwrap.dedent("""\
export AWS_ACCESS_KEY_ID={}
export AWS_SECRET_ACCESS_KEY={}
export AWS_SESSION_TOKEN={}\
""".format(*quoted_vars)))
def get_exec_args(input_command):
if len(input_command) == 0:
input_command = (os.environ['SHELL'],)
return (input_command[0], input_command)
def run_command(creds, command):
os.environ['AWS_ACCESS_KEY_ID'] = creds.access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = creds.secret_key
os.environ['AWS_SESSION_TOKEN'] = creds.token
os.execvp(*get_exec_args(command))
class RoleShell(BasicCommand):
NAME = 'roleshell'
DESCRIPTION = (
'Executes a shell with temporary AWS credentials provided as environment variables')
ARG_TABLE = [
dict(name='command', nargs=argparse.REMAINDER, positional_arg=True),
]
def _run_main(self, args, parsed_globals):
c = self._session.get_credentials()
if len(args.command) == 0:
print_creds(c)
else:
run_command(c, args.command)
|
import argparse
from awscli.customizations.commands import BasicCommand
import os
def awscli_initialize(event_hooks):
event_hooks.register('building-command-table.main', inject_commands)
def inject_commands(command_table, session, **kwargs):
command_table['roleshell'] = RoleShell(session)
def get_exec_args(input_command):
if len(input_command) == 0:
input_command = (os.environ['SHELL'],)
return (input_command[0], input_command)
class RoleShell(BasicCommand):
NAME = 'roleshell'
DESCRIPTION = ('Executes a shell with temporary AWS credentials provided as environment variables')
ARG_TABLE = [
dict(name='command', nargs=argparse.REMAINDER, positional_arg=True),
]
def _run_main(self, args, parsed_globals):
a = self._session.get_credentials()
os.environ['AWS_ACCESS_KEY_ID'] = a.access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = a.secret_key
os.environ['AWS_SESSION_TOKEN'] = a.token
os.execvp(*get_exec_args(args.command))
Make it possible to eval() output instead of execing another shell.import argparse
import os
import shlex
import textwrap
from awscli.customizations.commands import BasicCommand
def awscli_initialize(event_hooks):
event_hooks.register('building-command-table.main', inject_commands)
def inject_commands(command_table, session, **kwargs):
command_table['roleshell'] = RoleShell(session)
def print_creds(creds):
quoted_vars = map(shlex.quote, (creds.access_key,
creds.secret_key, creds.token))
print(textwrap.dedent("""\
export AWS_ACCESS_KEY_ID={}
export AWS_SECRET_ACCESS_KEY={}
export AWS_SESSION_TOKEN={}\
""".format(*quoted_vars)))
def get_exec_args(input_command):
if len(input_command) == 0:
input_command = (os.environ['SHELL'],)
return (input_command[0], input_command)
def run_command(creds, command):
os.environ['AWS_ACCESS_KEY_ID'] = creds.access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = creds.secret_key
os.environ['AWS_SESSION_TOKEN'] = creds.token
os.execvp(*get_exec_args(command))
class RoleShell(BasicCommand):
NAME = 'roleshell'
DESCRIPTION = (
'Executes a shell with temporary AWS credentials provided as environment variables')
ARG_TABLE = [
dict(name='command', nargs=argparse.REMAINDER, positional_arg=True),
]
def _run_main(self, args, parsed_globals):
c = self._session.get_credentials()
if len(args.command) == 0:
print_creds(c)
else:
run_command(c, args.command)
|
<commit_before>import argparse
from awscli.customizations.commands import BasicCommand
import os
def awscli_initialize(event_hooks):
event_hooks.register('building-command-table.main', inject_commands)
def inject_commands(command_table, session, **kwargs):
command_table['roleshell'] = RoleShell(session)
def get_exec_args(input_command):
if len(input_command) == 0:
input_command = (os.environ['SHELL'],)
return (input_command[0], input_command)
class RoleShell(BasicCommand):
NAME = 'roleshell'
DESCRIPTION = ('Executes a shell with temporary AWS credentials provided as environment variables')
ARG_TABLE = [
dict(name='command', nargs=argparse.REMAINDER, positional_arg=True),
]
def _run_main(self, args, parsed_globals):
a = self._session.get_credentials()
os.environ['AWS_ACCESS_KEY_ID'] = a.access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = a.secret_key
os.environ['AWS_SESSION_TOKEN'] = a.token
os.execvp(*get_exec_args(args.command))
<commit_msg>Make it possible to eval() output instead of execing another shell.<commit_after>import argparse
import os
import shlex
import textwrap
from awscli.customizations.commands import BasicCommand
def awscli_initialize(event_hooks):
event_hooks.register('building-command-table.main', inject_commands)
def inject_commands(command_table, session, **kwargs):
command_table['roleshell'] = RoleShell(session)
def print_creds(creds):
quoted_vars = map(shlex.quote, (creds.access_key,
creds.secret_key, creds.token))
print(textwrap.dedent("""\
export AWS_ACCESS_KEY_ID={}
export AWS_SECRET_ACCESS_KEY={}
export AWS_SESSION_TOKEN={}\
""".format(*quoted_vars)))
def get_exec_args(input_command):
if len(input_command) == 0:
input_command = (os.environ['SHELL'],)
return (input_command[0], input_command)
def run_command(creds, command):
os.environ['AWS_ACCESS_KEY_ID'] = creds.access_key
os.environ['AWS_SECRET_ACCESS_KEY'] = creds.secret_key
os.environ['AWS_SESSION_TOKEN'] = creds.token
os.execvp(*get_exec_args(command))
class RoleShell(BasicCommand):
NAME = 'roleshell'
DESCRIPTION = (
'Executes a shell with temporary AWS credentials provided as environment variables')
ARG_TABLE = [
dict(name='command', nargs=argparse.REMAINDER, positional_arg=True),
]
def _run_main(self, args, parsed_globals):
c = self._session.get_credentials()
if len(args.command) == 0:
print_creds(c)
else:
run_command(c, args.command)
|
7a8112249de859a5ef73fe07eb6029aeb1266f35
|
tob-api/tob_api/urls.py
|
tob-api/tob_api/urls.py
|
"""
Definition of urls for tob_api.
"""
from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
url(r"^$", RedirectView.as_view(url="api/v2/")),
url(
r"^api-auth/",
include("rest_framework.urls", namespace="rest_framework"),
),
# url(r"^api/v1/", include("api.urls")),
url(r"^api/v2/", include("api_v2.urls")),
url(r"^health$", views.health),
]
|
"""
Definition of urls for tob_api.
"""
from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
url(r"^$", RedirectView.as_view(url="api/v2/")),
url(
r"^api-auth/",
include("rest_framework.urls", namespace="rest_framework"),
),
url(r"^api/v2/", include("api_v2.urls")),
url(r"^health$", views.health),
]
|
Remove commented-out reference to v1
|
Remove commented-out reference to v1
Signed-off-by: Nicholas Rempel <b7f0f2181f2dc324d159332b253a82a715a40706@gmail.com>
|
Python
|
apache-2.0
|
swcurran/TheOrgBook,swcurran/TheOrgBook,WadeBarnes/TheOrgBook,swcurran/TheOrgBook,WadeBarnes/TheOrgBook,WadeBarnes/TheOrgBook,WadeBarnes/TheOrgBook,WadeBarnes/TheOrgBook,swcurran/TheOrgBook,swcurran/TheOrgBook
|
"""
Definition of urls for tob_api.
"""
from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
url(r"^$", RedirectView.as_view(url="api/v2/")),
url(
r"^api-auth/",
include("rest_framework.urls", namespace="rest_framework"),
),
# url(r"^api/v1/", include("api.urls")),
url(r"^api/v2/", include("api_v2.urls")),
url(r"^health$", views.health),
]
Remove commented-out reference to v1
Signed-off-by: Nicholas Rempel <b7f0f2181f2dc324d159332b253a82a715a40706@gmail.com>
|
"""
Definition of urls for tob_api.
"""
from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
url(r"^$", RedirectView.as_view(url="api/v2/")),
url(
r"^api-auth/",
include("rest_framework.urls", namespace="rest_framework"),
),
url(r"^api/v2/", include("api_v2.urls")),
url(r"^health$", views.health),
]
|
<commit_before>"""
Definition of urls for tob_api.
"""
from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
url(r"^$", RedirectView.as_view(url="api/v2/")),
url(
r"^api-auth/",
include("rest_framework.urls", namespace="rest_framework"),
),
# url(r"^api/v1/", include("api.urls")),
url(r"^api/v2/", include("api_v2.urls")),
url(r"^health$", views.health),
]
<commit_msg>Remove commented-out reference to v1
Signed-off-by: Nicholas Rempel <b7f0f2181f2dc324d159332b253a82a715a40706@gmail.com><commit_after>
|
"""
Definition of urls for tob_api.
"""
from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
url(r"^$", RedirectView.as_view(url="api/v2/")),
url(
r"^api-auth/",
include("rest_framework.urls", namespace="rest_framework"),
),
url(r"^api/v2/", include("api_v2.urls")),
url(r"^health$", views.health),
]
|
"""
Definition of urls for tob_api.
"""
from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
url(r"^$", RedirectView.as_view(url="api/v2/")),
url(
r"^api-auth/",
include("rest_framework.urls", namespace="rest_framework"),
),
# url(r"^api/v1/", include("api.urls")),
url(r"^api/v2/", include("api_v2.urls")),
url(r"^health$", views.health),
]
Remove commented-out reference to v1
Signed-off-by: Nicholas Rempel <b7f0f2181f2dc324d159332b253a82a715a40706@gmail.com>"""
Definition of urls for tob_api.
"""
from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
url(r"^$", RedirectView.as_view(url="api/v2/")),
url(
r"^api-auth/",
include("rest_framework.urls", namespace="rest_framework"),
),
url(r"^api/v2/", include("api_v2.urls")),
url(r"^health$", views.health),
]
|
<commit_before>"""
Definition of urls for tob_api.
"""
from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
url(r"^$", RedirectView.as_view(url="api/v2/")),
url(
r"^api-auth/",
include("rest_framework.urls", namespace="rest_framework"),
),
# url(r"^api/v1/", include("api.urls")),
url(r"^api/v2/", include("api_v2.urls")),
url(r"^health$", views.health),
]
<commit_msg>Remove commented-out reference to v1
Signed-off-by: Nicholas Rempel <b7f0f2181f2dc324d159332b253a82a715a40706@gmail.com><commit_after>"""
Definition of urls for tob_api.
"""
from django.conf.urls import include, url
from django.views.generic import RedirectView
from . import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = [
url(r"^$", RedirectView.as_view(url="api/v2/")),
url(
r"^api-auth/",
include("rest_framework.urls", namespace="rest_framework"),
),
url(r"^api/v2/", include("api_v2.urls")),
url(r"^health$", views.health),
]
|
8b80b8b82400bd78d72158471b5030309075310c
|
rdd/api.py
|
rdd/api.py
|
# -*- coding: utf-8 -*-
"""Python implementation of the Readability Shortener API"""
import requests
try:
import simplejson as json
except ImportError:
import json
class Readability(object):
def __init__(self, url=None, verbose=None):
self.url = url or 'https://readability.com/api/shortener/v1'
self.config = {}
if verbose is not None:
self.config['verbose'] = verbose
def _request(self, method, path, data=None, headers=None):
url = self.url + path
r = requests.request(method, url, data=data, headers=headers,
config=self.config, allow_redirects=True)
r.raise_for_status()
if not 'application/json' in r.headers['Content-Type']:
raise TypeError('No JSON in response')
return json.loads(r.content) if r.content.strip() else None
def resources(self):
"""Retrieve information about sub-resources."""
return self._request('GET', '/')
def shorten(self, full_url):
"""Create a new shortened URL."""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = 'url=%s' % full_url
return self._request('POST', '/urls', data=data, headers=headers)
def metadata(self, url_id):
"""Retrieve available metadata of a shortened link."""
return self._request('GET', '/urls/%s' % url_id)
|
# -*- coding: utf-8 -*-
"""Python implementation of the Readability Shortener API"""
import requests
try:
import simplejson as json
except ImportError:
import json
class Readability(object):
def __init__(self, url=None, verbose=None):
self.url = url or 'https://readability.com/api/shortener/v1'
self.verbose = verbose
def _request(self, method, path, data=None, headers=None):
url = self.url + path
config = {}
if self.verbose is not None:
config['verbose'] = self.verbose
r = requests.request(method, url, data=data, headers=headers,
config=config, allow_redirects=True)
r.raise_for_status()
if not 'application/json' in r.headers['Content-Type']:
raise TypeError('No JSON in response')
content = r.content.strip()
if content:
if self.verbose is not None:
self.verbose.write(content + '\n')
return json.loads(content)
else:
return None
def resources(self):
"""Retrieve information about sub-resources."""
return self._request('GET', '/')
def shorten(self, full_url):
"""Create a new shortened URL."""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = 'url=%s' % full_url
return self._request('POST', '/urls', data=data, headers=headers)
def metadata(self, url_id):
"""Retrieve available metadata of a shortened link."""
return self._request('GET', '/urls/%s' % url_id)
|
Print HTTP content in verbose mode
|
Print HTTP content in verbose mode
|
Python
|
mit
|
mlafeldt/rdd.py,mlafeldt/rdd.py,mlafeldt/rdd.py
|
# -*- coding: utf-8 -*-
"""Python implementation of the Readability Shortener API"""
import requests
try:
import simplejson as json
except ImportError:
import json
class Readability(object):
def __init__(self, url=None, verbose=None):
self.url = url or 'https://readability.com/api/shortener/v1'
self.config = {}
if verbose is not None:
self.config['verbose'] = verbose
def _request(self, method, path, data=None, headers=None):
url = self.url + path
r = requests.request(method, url, data=data, headers=headers,
config=self.config, allow_redirects=True)
r.raise_for_status()
if not 'application/json' in r.headers['Content-Type']:
raise TypeError('No JSON in response')
return json.loads(r.content) if r.content.strip() else None
def resources(self):
"""Retrieve information about sub-resources."""
return self._request('GET', '/')
def shorten(self, full_url):
"""Create a new shortened URL."""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = 'url=%s' % full_url
return self._request('POST', '/urls', data=data, headers=headers)
def metadata(self, url_id):
"""Retrieve available metadata of a shortened link."""
return self._request('GET', '/urls/%s' % url_id)
Print HTTP content in verbose mode
|
# -*- coding: utf-8 -*-
"""Python implementation of the Readability Shortener API"""
import requests
try:
import simplejson as json
except ImportError:
import json
class Readability(object):
def __init__(self, url=None, verbose=None):
self.url = url or 'https://readability.com/api/shortener/v1'
self.verbose = verbose
def _request(self, method, path, data=None, headers=None):
url = self.url + path
config = {}
if self.verbose is not None:
config['verbose'] = self.verbose
r = requests.request(method, url, data=data, headers=headers,
config=config, allow_redirects=True)
r.raise_for_status()
if not 'application/json' in r.headers['Content-Type']:
raise TypeError('No JSON in response')
content = r.content.strip()
if content:
if self.verbose is not None:
self.verbose.write(content + '\n')
return json.loads(content)
else:
return None
def resources(self):
"""Retrieve information about sub-resources."""
return self._request('GET', '/')
def shorten(self, full_url):
"""Create a new shortened URL."""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = 'url=%s' % full_url
return self._request('POST', '/urls', data=data, headers=headers)
def metadata(self, url_id):
"""Retrieve available metadata of a shortened link."""
return self._request('GET', '/urls/%s' % url_id)
|
<commit_before># -*- coding: utf-8 -*-
"""Python implementation of the Readability Shortener API"""
import requests
try:
import simplejson as json
except ImportError:
import json
class Readability(object):
def __init__(self, url=None, verbose=None):
self.url = url or 'https://readability.com/api/shortener/v1'
self.config = {}
if verbose is not None:
self.config['verbose'] = verbose
def _request(self, method, path, data=None, headers=None):
url = self.url + path
r = requests.request(method, url, data=data, headers=headers,
config=self.config, allow_redirects=True)
r.raise_for_status()
if not 'application/json' in r.headers['Content-Type']:
raise TypeError('No JSON in response')
return json.loads(r.content) if r.content.strip() else None
def resources(self):
"""Retrieve information about sub-resources."""
return self._request('GET', '/')
def shorten(self, full_url):
"""Create a new shortened URL."""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = 'url=%s' % full_url
return self._request('POST', '/urls', data=data, headers=headers)
def metadata(self, url_id):
"""Retrieve available metadata of a shortened link."""
return self._request('GET', '/urls/%s' % url_id)
<commit_msg>Print HTTP content in verbose mode<commit_after>
|
# -*- coding: utf-8 -*-
"""Python implementation of the Readability Shortener API"""
import requests
try:
import simplejson as json
except ImportError:
import json
class Readability(object):
def __init__(self, url=None, verbose=None):
self.url = url or 'https://readability.com/api/shortener/v1'
self.verbose = verbose
def _request(self, method, path, data=None, headers=None):
url = self.url + path
config = {}
if self.verbose is not None:
config['verbose'] = self.verbose
r = requests.request(method, url, data=data, headers=headers,
config=config, allow_redirects=True)
r.raise_for_status()
if not 'application/json' in r.headers['Content-Type']:
raise TypeError('No JSON in response')
content = r.content.strip()
if content:
if self.verbose is not None:
self.verbose.write(content + '\n')
return json.loads(content)
else:
return None
def resources(self):
"""Retrieve information about sub-resources."""
return self._request('GET', '/')
def shorten(self, full_url):
"""Create a new shortened URL."""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = 'url=%s' % full_url
return self._request('POST', '/urls', data=data, headers=headers)
def metadata(self, url_id):
"""Retrieve available metadata of a shortened link."""
return self._request('GET', '/urls/%s' % url_id)
|
# -*- coding: utf-8 -*-
"""Python implementation of the Readability Shortener API"""
import requests
try:
import simplejson as json
except ImportError:
import json
class Readability(object):
def __init__(self, url=None, verbose=None):
self.url = url or 'https://readability.com/api/shortener/v1'
self.config = {}
if verbose is not None:
self.config['verbose'] = verbose
def _request(self, method, path, data=None, headers=None):
url = self.url + path
r = requests.request(method, url, data=data, headers=headers,
config=self.config, allow_redirects=True)
r.raise_for_status()
if not 'application/json' in r.headers['Content-Type']:
raise TypeError('No JSON in response')
return json.loads(r.content) if r.content.strip() else None
def resources(self):
"""Retrieve information about sub-resources."""
return self._request('GET', '/')
def shorten(self, full_url):
"""Create a new shortened URL."""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = 'url=%s' % full_url
return self._request('POST', '/urls', data=data, headers=headers)
def metadata(self, url_id):
"""Retrieve available metadata of a shortened link."""
return self._request('GET', '/urls/%s' % url_id)
Print HTTP content in verbose mode# -*- coding: utf-8 -*-
"""Python implementation of the Readability Shortener API"""
import requests
try:
import simplejson as json
except ImportError:
import json
class Readability(object):
def __init__(self, url=None, verbose=None):
self.url = url or 'https://readability.com/api/shortener/v1'
self.verbose = verbose
def _request(self, method, path, data=None, headers=None):
url = self.url + path
config = {}
if self.verbose is not None:
config['verbose'] = self.verbose
r = requests.request(method, url, data=data, headers=headers,
config=config, allow_redirects=True)
r.raise_for_status()
if not 'application/json' in r.headers['Content-Type']:
raise TypeError('No JSON in response')
content = r.content.strip()
if content:
if self.verbose is not None:
self.verbose.write(content + '\n')
return json.loads(content)
else:
return None
def resources(self):
"""Retrieve information about sub-resources."""
return self._request('GET', '/')
def shorten(self, full_url):
"""Create a new shortened URL."""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = 'url=%s' % full_url
return self._request('POST', '/urls', data=data, headers=headers)
def metadata(self, url_id):
"""Retrieve available metadata of a shortened link."""
return self._request('GET', '/urls/%s' % url_id)
|
<commit_before># -*- coding: utf-8 -*-
"""Python implementation of the Readability Shortener API"""
import requests
try:
import simplejson as json
except ImportError:
import json
class Readability(object):
def __init__(self, url=None, verbose=None):
self.url = url or 'https://readability.com/api/shortener/v1'
self.config = {}
if verbose is not None:
self.config['verbose'] = verbose
def _request(self, method, path, data=None, headers=None):
url = self.url + path
r = requests.request(method, url, data=data, headers=headers,
config=self.config, allow_redirects=True)
r.raise_for_status()
if not 'application/json' in r.headers['Content-Type']:
raise TypeError('No JSON in response')
return json.loads(r.content) if r.content.strip() else None
def resources(self):
"""Retrieve information about sub-resources."""
return self._request('GET', '/')
def shorten(self, full_url):
"""Create a new shortened URL."""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = 'url=%s' % full_url
return self._request('POST', '/urls', data=data, headers=headers)
def metadata(self, url_id):
"""Retrieve available metadata of a shortened link."""
return self._request('GET', '/urls/%s' % url_id)
<commit_msg>Print HTTP content in verbose mode<commit_after># -*- coding: utf-8 -*-
"""Python implementation of the Readability Shortener API"""
import requests
try:
import simplejson as json
except ImportError:
import json
class Readability(object):
def __init__(self, url=None, verbose=None):
self.url = url or 'https://readability.com/api/shortener/v1'
self.verbose = verbose
def _request(self, method, path, data=None, headers=None):
url = self.url + path
config = {}
if self.verbose is not None:
config['verbose'] = self.verbose
r = requests.request(method, url, data=data, headers=headers,
config=config, allow_redirects=True)
r.raise_for_status()
if not 'application/json' in r.headers['Content-Type']:
raise TypeError('No JSON in response')
content = r.content.strip()
if content:
if self.verbose is not None:
self.verbose.write(content + '\n')
return json.loads(content)
else:
return None
def resources(self):
"""Retrieve information about sub-resources."""
return self._request('GET', '/')
def shorten(self, full_url):
"""Create a new shortened URL."""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = 'url=%s' % full_url
return self._request('POST', '/urls', data=data, headers=headers)
def metadata(self, url_id):
"""Retrieve available metadata of a shortened link."""
return self._request('GET', '/urls/%s' % url_id)
|
eb2699b6050534045b95e5ea78cb0ea68de474ed
|
website/members/apps.py
|
website/members/apps.py
|
from django.apps import AppConfig
class MembersConfig(AppConfig):
name = 'members'
verbose_name = 'UTN Member Management'
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class MembersConfig(AppConfig):
name = 'members'
verbose_name = _('UTN Member Management')
|
Make members verbose name translatable
|
:speech_balloon: Make members verbose name translatable
|
Python
|
agpl-3.0
|
Dekker1/moore,UTNkar/moore,Dekker1/moore,Dekker1/moore,UTNkar/moore,UTNkar/moore,UTNkar/moore,Dekker1/moore
|
from django.apps import AppConfig
class MembersConfig(AppConfig):
name = 'members'
verbose_name = 'UTN Member Management'
:speech_balloon: Make members verbose name translatable
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class MembersConfig(AppConfig):
name = 'members'
verbose_name = _('UTN Member Management')
|
<commit_before>from django.apps import AppConfig
class MembersConfig(AppConfig):
name = 'members'
verbose_name = 'UTN Member Management'
<commit_msg>:speech_balloon: Make members verbose name translatable<commit_after>
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class MembersConfig(AppConfig):
name = 'members'
verbose_name = _('UTN Member Management')
|
from django.apps import AppConfig
class MembersConfig(AppConfig):
name = 'members'
verbose_name = 'UTN Member Management'
:speech_balloon: Make members verbose name translatablefrom django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class MembersConfig(AppConfig):
name = 'members'
verbose_name = _('UTN Member Management')
|
<commit_before>from django.apps import AppConfig
class MembersConfig(AppConfig):
name = 'members'
verbose_name = 'UTN Member Management'
<commit_msg>:speech_balloon: Make members verbose name translatable<commit_after>from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class MembersConfig(AppConfig):
name = 'members'
verbose_name = _('UTN Member Management')
|
b545ebcd2b604bf293bfbbb1af5a9ab2ba6965c7
|
wayback3/wayback3.py
|
wayback3/wayback3.py
|
"""
Simple Python 3-safe package for accessing Wayback Machine archives
via its JSON API
"""
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
"""
Simple Python 3-safe package for accessing Wayback Machine archives
via its JSON API
"""
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
WAYBACK_URL_ROOT = "http://web.archive.org"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
Add a constant for WB root URL
|
Add a constant for WB root URL
|
Python
|
agpl-3.0
|
OpenSSR/openssr-parser,OpenSSR/openssr-parser
|
"""
Simple Python 3-safe package for accessing Wayback Machine archives
via its JSON API
"""
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
Add a constant for WB root URL
|
"""
Simple Python 3-safe package for accessing Wayback Machine archives
via its JSON API
"""
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
WAYBACK_URL_ROOT = "http://web.archive.org"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
<commit_before>"""
Simple Python 3-safe package for accessing Wayback Machine archives
via its JSON API
"""
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
<commit_msg>Add a constant for WB root URL<commit_after>
|
"""
Simple Python 3-safe package for accessing Wayback Machine archives
via its JSON API
"""
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
WAYBACK_URL_ROOT = "http://web.archive.org"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
"""
Simple Python 3-safe package for accessing Wayback Machine archives
via its JSON API
"""
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
Add a constant for WB root URL"""
Simple Python 3-safe package for accessing Wayback Machine archives
via its JSON API
"""
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
WAYBACK_URL_ROOT = "http://web.archive.org"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
<commit_before>"""
Simple Python 3-safe package for accessing Wayback Machine archives
via its JSON API
"""
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
<commit_msg>Add a constant for WB root URL<commit_after>"""
Simple Python 3-safe package for accessing Wayback Machine archives
via its JSON API
"""
import datetime
import requests
FORMAT_STRING = "%Y%m%d%H%M%S" # Looks like "20130919044612"
AVAILABILITY_URL = "http://archive.org/wayback/available?url=%s"
WAYBACK_URL_ROOT = "http://web.archive.org"
def availability(url):
response = requests.get(AVAILABILITY_URL % (url))
print(response)
print(response.text)
response_j = response.json()
if response_j.get('archived_snapshots') == {}:
return None
else:
closest = response_j.get('archived_snapshots').get('closest')
avail = closest.get('available')
status = int(closest.get('status'))
timestamp = closest.get('timestamp')
timestamp = datetime.datetime.strptime(timestamp, FORMAT_STRING)
url = closest.get('url')
return {'verbatim': closest, 'url': url, 'timestamp': timestamp}
|
cb7b51414a034d50e44fb30c6528b878aa9c64ee
|
web_ui/opensesame.py
|
web_ui/opensesame.py
|
# Enter the password of the email address you intend to send emails from
password = ""
|
# Enter the password of the email address you intend to send emails from
email_address = ""
email_password = ""
# Enter the login information for the EPNM API Account
API_username = ""
API_password = ""
|
Add email and API template
|
Add email and API template
|
Python
|
apache-2.0
|
cisco-gve/epnm_alarm_report,cisco-gve/epnm_alarm_report,cisco-gve/epnm_alarm_report,cisco-gve/epnm_alarm_report
|
# Enter the password of the email address you intend to send emails from
password = ""Add email and API template
|
# Enter the password of the email address you intend to send emails from
email_address = ""
email_password = ""
# Enter the login information for the EPNM API Account
API_username = ""
API_password = ""
|
<commit_before># Enter the password of the email address you intend to send emails from
password = ""<commit_msg>Add email and API template<commit_after>
|
# Enter the password of the email address you intend to send emails from
email_address = ""
email_password = ""
# Enter the login information for the EPNM API Account
API_username = ""
API_password = ""
|
# Enter the password of the email address you intend to send emails from
password = ""Add email and API template# Enter the password of the email address you intend to send emails from
email_address = ""
email_password = ""
# Enter the login information for the EPNM API Account
API_username = ""
API_password = ""
|
<commit_before># Enter the password of the email address you intend to send emails from
password = ""<commit_msg>Add email and API template<commit_after># Enter the password of the email address you intend to send emails from
email_address = ""
email_password = ""
# Enter the login information for the EPNM API Account
API_username = ""
API_password = ""
|
ce8dc3daa6a4af3c5ed743fb2b5c4470bff7647b
|
test_knot.py
|
test_knot.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import knot
class TestContainer(unittest.TestCase):
def test_wrapper_looks_like_service(self):
c = knot.Container()
@c.service('service')
def service(container):
"""Docstring."""
pass
self.assertEqual(c['service'].__name__, 'service')
self.assertEqual(c['service'].__doc__, 'Docstring.')
def test_returns_if_value(self):
c = knot.Container({'value': 'foobar'})
self.assertEqual(c('value'), 'foobar')
def test_calls_if_service(self):
c = knot.Container()
@c.service('service')
def service(container):
return 'foobar'
self.assertEqual(c('service'), 'foobar')
def test_shares_service(self):
c = knot.Container()
@c.service('service', True)
def service(container):
return {}
dict1 = c('service')
dict2 = c('service')
assert isinstance(dict1, dict)
assert isinstance(dict2, dict)
assert dict1 is dict2
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import knot
class TestContainer(unittest.TestCase):
def test_wrapper_looks_like_service(self):
c = knot.Container()
@c.service('service')
def service(container):
"""Docstring."""
pass
self.assertEqual(c['service'].__name__, 'service')
self.assertEqual(c['service'].__doc__, 'Docstring.')
def test_returns_if_value(self):
c = knot.Container({'value': 'foobar'})
self.assertEqual(c('value'), 'foobar')
def test_calls_if_service(self):
c = knot.Container()
@c.service('service')
def service(container):
return 'foobar'
self.assertEqual(c('service'), 'foobar')
def test_returns_default_with_unknown_key(self):
c = knot.Container()
self.assertEqual(c('service', 'foobar'), 'foobar')
self.assertEqual(c('service', lambda c: 'foobar'), 'foobar')
def test_shares_service(self):
c = knot.Container()
@c.service('service', True)
def service(container):
return {}
dict1 = c('service')
dict2 = c('service')
assert isinstance(dict1, dict)
assert isinstance(dict2, dict)
assert dict1 is dict2
if __name__ == '__main__':
unittest.main()
|
Add test for default values.
|
Add test for default values.
|
Python
|
mit
|
jaapverloop/knot
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import knot
class TestContainer(unittest.TestCase):
def test_wrapper_looks_like_service(self):
c = knot.Container()
@c.service('service')
def service(container):
"""Docstring."""
pass
self.assertEqual(c['service'].__name__, 'service')
self.assertEqual(c['service'].__doc__, 'Docstring.')
def test_returns_if_value(self):
c = knot.Container({'value': 'foobar'})
self.assertEqual(c('value'), 'foobar')
def test_calls_if_service(self):
c = knot.Container()
@c.service('service')
def service(container):
return 'foobar'
self.assertEqual(c('service'), 'foobar')
def test_shares_service(self):
c = knot.Container()
@c.service('service', True)
def service(container):
return {}
dict1 = c('service')
dict2 = c('service')
assert isinstance(dict1, dict)
assert isinstance(dict2, dict)
assert dict1 is dict2
if __name__ == '__main__':
unittest.main()
Add test for default values.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import knot
class TestContainer(unittest.TestCase):
def test_wrapper_looks_like_service(self):
c = knot.Container()
@c.service('service')
def service(container):
"""Docstring."""
pass
self.assertEqual(c['service'].__name__, 'service')
self.assertEqual(c['service'].__doc__, 'Docstring.')
def test_returns_if_value(self):
c = knot.Container({'value': 'foobar'})
self.assertEqual(c('value'), 'foobar')
def test_calls_if_service(self):
c = knot.Container()
@c.service('service')
def service(container):
return 'foobar'
self.assertEqual(c('service'), 'foobar')
def test_returns_default_with_unknown_key(self):
c = knot.Container()
self.assertEqual(c('service', 'foobar'), 'foobar')
self.assertEqual(c('service', lambda c: 'foobar'), 'foobar')
def test_shares_service(self):
c = knot.Container()
@c.service('service', True)
def service(container):
return {}
dict1 = c('service')
dict2 = c('service')
assert isinstance(dict1, dict)
assert isinstance(dict2, dict)
assert dict1 is dict2
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import knot
class TestContainer(unittest.TestCase):
def test_wrapper_looks_like_service(self):
c = knot.Container()
@c.service('service')
def service(container):
"""Docstring."""
pass
self.assertEqual(c['service'].__name__, 'service')
self.assertEqual(c['service'].__doc__, 'Docstring.')
def test_returns_if_value(self):
c = knot.Container({'value': 'foobar'})
self.assertEqual(c('value'), 'foobar')
def test_calls_if_service(self):
c = knot.Container()
@c.service('service')
def service(container):
return 'foobar'
self.assertEqual(c('service'), 'foobar')
def test_shares_service(self):
c = knot.Container()
@c.service('service', True)
def service(container):
return {}
dict1 = c('service')
dict2 = c('service')
assert isinstance(dict1, dict)
assert isinstance(dict2, dict)
assert dict1 is dict2
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for default values.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import knot
class TestContainer(unittest.TestCase):
def test_wrapper_looks_like_service(self):
c = knot.Container()
@c.service('service')
def service(container):
"""Docstring."""
pass
self.assertEqual(c['service'].__name__, 'service')
self.assertEqual(c['service'].__doc__, 'Docstring.')
def test_returns_if_value(self):
c = knot.Container({'value': 'foobar'})
self.assertEqual(c('value'), 'foobar')
def test_calls_if_service(self):
c = knot.Container()
@c.service('service')
def service(container):
return 'foobar'
self.assertEqual(c('service'), 'foobar')
def test_returns_default_with_unknown_key(self):
c = knot.Container()
self.assertEqual(c('service', 'foobar'), 'foobar')
self.assertEqual(c('service', lambda c: 'foobar'), 'foobar')
def test_shares_service(self):
c = knot.Container()
@c.service('service', True)
def service(container):
return {}
dict1 = c('service')
dict2 = c('service')
assert isinstance(dict1, dict)
assert isinstance(dict2, dict)
assert dict1 is dict2
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import knot
class TestContainer(unittest.TestCase):
def test_wrapper_looks_like_service(self):
c = knot.Container()
@c.service('service')
def service(container):
"""Docstring."""
pass
self.assertEqual(c['service'].__name__, 'service')
self.assertEqual(c['service'].__doc__, 'Docstring.')
def test_returns_if_value(self):
c = knot.Container({'value': 'foobar'})
self.assertEqual(c('value'), 'foobar')
def test_calls_if_service(self):
c = knot.Container()
@c.service('service')
def service(container):
return 'foobar'
self.assertEqual(c('service'), 'foobar')
def test_shares_service(self):
c = knot.Container()
@c.service('service', True)
def service(container):
return {}
dict1 = c('service')
dict2 = c('service')
assert isinstance(dict1, dict)
assert isinstance(dict2, dict)
assert dict1 is dict2
if __name__ == '__main__':
unittest.main()
Add test for default values.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import knot
class TestContainer(unittest.TestCase):
def test_wrapper_looks_like_service(self):
c = knot.Container()
@c.service('service')
def service(container):
"""Docstring."""
pass
self.assertEqual(c['service'].__name__, 'service')
self.assertEqual(c['service'].__doc__, 'Docstring.')
def test_returns_if_value(self):
c = knot.Container({'value': 'foobar'})
self.assertEqual(c('value'), 'foobar')
def test_calls_if_service(self):
c = knot.Container()
@c.service('service')
def service(container):
return 'foobar'
self.assertEqual(c('service'), 'foobar')
def test_returns_default_with_unknown_key(self):
c = knot.Container()
self.assertEqual(c('service', 'foobar'), 'foobar')
self.assertEqual(c('service', lambda c: 'foobar'), 'foobar')
def test_shares_service(self):
c = knot.Container()
@c.service('service', True)
def service(container):
return {}
dict1 = c('service')
dict2 = c('service')
assert isinstance(dict1, dict)
assert isinstance(dict2, dict)
assert dict1 is dict2
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import knot
class TestContainer(unittest.TestCase):
def test_wrapper_looks_like_service(self):
c = knot.Container()
@c.service('service')
def service(container):
"""Docstring."""
pass
self.assertEqual(c['service'].__name__, 'service')
self.assertEqual(c['service'].__doc__, 'Docstring.')
def test_returns_if_value(self):
c = knot.Container({'value': 'foobar'})
self.assertEqual(c('value'), 'foobar')
def test_calls_if_service(self):
c = knot.Container()
@c.service('service')
def service(container):
return 'foobar'
self.assertEqual(c('service'), 'foobar')
def test_shares_service(self):
c = knot.Container()
@c.service('service', True)
def service(container):
return {}
dict1 = c('service')
dict2 = c('service')
assert isinstance(dict1, dict)
assert isinstance(dict2, dict)
assert dict1 is dict2
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for default values.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import knot
class TestContainer(unittest.TestCase):
def test_wrapper_looks_like_service(self):
c = knot.Container()
@c.service('service')
def service(container):
"""Docstring."""
pass
self.assertEqual(c['service'].__name__, 'service')
self.assertEqual(c['service'].__doc__, 'Docstring.')
def test_returns_if_value(self):
c = knot.Container({'value': 'foobar'})
self.assertEqual(c('value'), 'foobar')
def test_calls_if_service(self):
c = knot.Container()
@c.service('service')
def service(container):
return 'foobar'
self.assertEqual(c('service'), 'foobar')
def test_returns_default_with_unknown_key(self):
c = knot.Container()
self.assertEqual(c('service', 'foobar'), 'foobar')
self.assertEqual(c('service', lambda c: 'foobar'), 'foobar')
def test_shares_service(self):
c = knot.Container()
@c.service('service', True)
def service(container):
return {}
dict1 = c('service')
dict2 = c('service')
assert isinstance(dict1, dict)
assert isinstance(dict2, dict)
assert dict1 is dict2
if __name__ == '__main__':
unittest.main()
|
3a7612925905f129c247f4c139aa0b896499346d
|
dsmtpd/__init__.py
|
dsmtpd/__init__.py
|
# -*- coding: utf-8 -*-
"""
dsmtpd
~~~~~~
:copyright: (c) 2013 by Stephane Wirtel <stephane@wirtel.be>
:license: BSD, see LICENSE for more details
"""
__name__ = "dsmtpd"
__version__ = "0.3git"
__author__ = ("Stephane Wirtel",)
__author_email__ = "stephane@wirtel.be"
|
# -*- coding: utf-8 -*-
"""
dsmtpd
~~~~~~
:copyright: (c) 2013 by Stephane Wirtel <stephane@wirtel.be>
:license: BSD, see LICENSE for more details
"""
__name__ = "dsmtpd"
__version__ = "0.3"
__author__ = "Stephane Wirtel"
__author_email__ = "stephane@wirtel.be"
|
Use a single string for the author
|
Use a single string for the author
|
Python
|
bsd-2-clause
|
matrixise/dsmtpd
|
# -*- coding: utf-8 -*-
"""
dsmtpd
~~~~~~
:copyright: (c) 2013 by Stephane Wirtel <stephane@wirtel.be>
:license: BSD, see LICENSE for more details
"""
__name__ = "dsmtpd"
__version__ = "0.3git"
__author__ = ("Stephane Wirtel",)
__author_email__ = "stephane@wirtel.be"
Use a single string for the author
|
# -*- coding: utf-8 -*-
"""
dsmtpd
~~~~~~
:copyright: (c) 2013 by Stephane Wirtel <stephane@wirtel.be>
:license: BSD, see LICENSE for more details
"""
__name__ = "dsmtpd"
__version__ = "0.3"
__author__ = "Stephane Wirtel"
__author_email__ = "stephane@wirtel.be"
|
<commit_before># -*- coding: utf-8 -*-
"""
dsmtpd
~~~~~~
:copyright: (c) 2013 by Stephane Wirtel <stephane@wirtel.be>
:license: BSD, see LICENSE for more details
"""
__name__ = "dsmtpd"
__version__ = "0.3git"
__author__ = ("Stephane Wirtel",)
__author_email__ = "stephane@wirtel.be"
<commit_msg>Use a single string for the author<commit_after>
|
# -*- coding: utf-8 -*-
"""
dsmtpd
~~~~~~
:copyright: (c) 2013 by Stephane Wirtel <stephane@wirtel.be>
:license: BSD, see LICENSE for more details
"""
__name__ = "dsmtpd"
__version__ = "0.3"
__author__ = "Stephane Wirtel"
__author_email__ = "stephane@wirtel.be"
|
# -*- coding: utf-8 -*-
"""
dsmtpd
~~~~~~
:copyright: (c) 2013 by Stephane Wirtel <stephane@wirtel.be>
:license: BSD, see LICENSE for more details
"""
__name__ = "dsmtpd"
__version__ = "0.3git"
__author__ = ("Stephane Wirtel",)
__author_email__ = "stephane@wirtel.be"
Use a single string for the author# -*- coding: utf-8 -*-
"""
dsmtpd
~~~~~~
:copyright: (c) 2013 by Stephane Wirtel <stephane@wirtel.be>
:license: BSD, see LICENSE for more details
"""
__name__ = "dsmtpd"
__version__ = "0.3"
__author__ = "Stephane Wirtel"
__author_email__ = "stephane@wirtel.be"
|
<commit_before># -*- coding: utf-8 -*-
"""
dsmtpd
~~~~~~
:copyright: (c) 2013 by Stephane Wirtel <stephane@wirtel.be>
:license: BSD, see LICENSE for more details
"""
__name__ = "dsmtpd"
__version__ = "0.3git"
__author__ = ("Stephane Wirtel",)
__author_email__ = "stephane@wirtel.be"
<commit_msg>Use a single string for the author<commit_after># -*- coding: utf-8 -*-
"""
dsmtpd
~~~~~~
:copyright: (c) 2013 by Stephane Wirtel <stephane@wirtel.be>
:license: BSD, see LICENSE for more details
"""
__name__ = "dsmtpd"
__version__ = "0.3"
__author__ = "Stephane Wirtel"
__author_email__ = "stephane@wirtel.be"
|
05fa3c4c6ab1d7619281399bb5f1db89e55c6fa8
|
einops/__init__.py
|
einops/__init__.py
|
__author__ = 'Alex Rogozhnikov'
__version__ = '0.4.1'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, parse_shape, asnumpy
|
__author__ = 'Alex Rogozhnikov'
__version__ = '0.4.1'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'einsum',
'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, einsum, parse_shape, asnumpy
|
Include einsum in main library
|
Include einsum in main library
|
Python
|
mit
|
arogozhnikov/einops
|
__author__ = 'Alex Rogozhnikov'
__version__ = '0.4.1'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, parse_shape, asnumpy
Include einsum in main library
|
__author__ = 'Alex Rogozhnikov'
__version__ = '0.4.1'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'einsum',
'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, einsum, parse_shape, asnumpy
|
<commit_before>__author__ = 'Alex Rogozhnikov'
__version__ = '0.4.1'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, parse_shape, asnumpy
<commit_msg>Include einsum in main library<commit_after>
|
__author__ = 'Alex Rogozhnikov'
__version__ = '0.4.1'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'einsum',
'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, einsum, parse_shape, asnumpy
|
__author__ = 'Alex Rogozhnikov'
__version__ = '0.4.1'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, parse_shape, asnumpy
Include einsum in main library__author__ = 'Alex Rogozhnikov'
__version__ = '0.4.1'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'einsum',
'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, einsum, parse_shape, asnumpy
|
<commit_before>__author__ = 'Alex Rogozhnikov'
__version__ = '0.4.1'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, parse_shape, asnumpy
<commit_msg>Include einsum in main library<commit_after>__author__ = 'Alex Rogozhnikov'
__version__ = '0.4.1'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'einsum',
'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, einsum, parse_shape, asnumpy
|
5f9dcf6e277f3a2136840c56fbf3c117319cc41b
|
pyjokes/__init__.py
|
pyjokes/__init__.py
|
from __future__ import absolute_import
from .pyjokes import get_local_joke
from .chuck import get_chuck_nerd_joke
from .jokes import jokes
__version__ = '0.1.1'
|
Make jokes and functions available globally in the module
|
Make jokes and functions available globally in the module
|
Python
|
bsd-3-clause
|
birdsarah/pyjokes,gmarkall/pyjokes,trojjer/pyjokes,pyjokes/pyjokes,borjaayerdi/pyjokes,Wren6991/pyjokes,martinohanlon/pyjokes,ElectronicsGeek/pyjokes,bennuttall/pyjokes
|
Make jokes and functions available globally in the module
|
from __future__ import absolute_import
from .pyjokes import get_local_joke
from .chuck import get_chuck_nerd_joke
from .jokes import jokes
__version__ = '0.1.1'
|
<commit_before><commit_msg>Make jokes and functions available globally in the module<commit_after>
|
from __future__ import absolute_import
from .pyjokes import get_local_joke
from .chuck import get_chuck_nerd_joke
from .jokes import jokes
__version__ = '0.1.1'
|
Make jokes and functions available globally in the modulefrom __future__ import absolute_import
from .pyjokes import get_local_joke
from .chuck import get_chuck_nerd_joke
from .jokes import jokes
__version__ = '0.1.1'
|
<commit_before><commit_msg>Make jokes and functions available globally in the module<commit_after>from __future__ import absolute_import
from .pyjokes import get_local_joke
from .chuck import get_chuck_nerd_joke
from .jokes import jokes
__version__ = '0.1.1'
|
|
b18ab0218fe221dc71047b68a4016b9c107e3664
|
python/src/setup.py
|
python/src/setup.py
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing Pipeline lib."""
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEnginePipeline",
version="1.9.21.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine pipeline data processing",
url="https://github.com/GoogleCloudPlatform/appengine-pipelines",
license="Apache License 2.0",
description=("Enable asynchronous pipeline style data processing on "
"App Engine"),
zip_safe=True,
include_package_data=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.21",
],
extras_require={'python2.5': ["simplejson >= 3.6.5"]}
)
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing Pipeline lib."""
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEnginePipeline",
version="1.9.22.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine pipeline data processing",
url="https://github.com/GoogleCloudPlatform/appengine-pipelines",
license="Apache License 2.0",
description=("Enable asynchronous pipeline style data processing on "
"App Engine"),
zip_safe=True,
include_package_data=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.22",
],
extras_require={'python2.5': ["simplejson >= 3.6.5"]}
)
|
Bump version to 1.9.22.1 and depend on the CloudStorageClient 1.9.22.1
|
Bump version to 1.9.22.1 and depend on the CloudStorageClient 1.9.22.1
|
Python
|
apache-2.0
|
VirusTotal/appengine-pipelines,aozarov/appengine-pipelines,vendasta/appengine-pipelines,vendasta/appengine-pipelines,Loudr/appengine-pipelines,vendasta/appengine-pipelines,googlecloudplatform/appengine-pipelines,Loudr/appengine-pipelines,googlecloudplatform/appengine-pipelines,VirusTotal/appengine-pipelines,GoogleCloudPlatform/appengine-pipelines,VirusTotal/appengine-pipelines,vendasta/appengine-pipelines,VirusTotal/appengine-pipelines,googlecloudplatform/appengine-pipelines,aozarov/appengine-pipelines,Loudr/appengine-pipelines,googlecloudplatform/appengine-pipelines,aozarov/appengine-pipelines,GoogleCloudPlatform/appengine-pipelines,googlecloudplatform/appengine-pipelines,aozarov/appengine-pipelines,Loudr/appengine-pipelines,GoogleCloudPlatform/appengine-pipelines,GoogleCloudPlatform/appengine-pipelines,GoogleCloudPlatform/appengine-pipelines,VirusTotal/appengine-pipelines,vendasta/appengine-pipelines,aozarov/appengine-pipelines,Loudr/appengine-pipelines
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing Pipeline lib."""
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEnginePipeline",
version="1.9.21.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine pipeline data processing",
url="https://github.com/GoogleCloudPlatform/appengine-pipelines",
license="Apache License 2.0",
description=("Enable asynchronous pipeline style data processing on "
"App Engine"),
zip_safe=True,
include_package_data=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.21",
],
extras_require={'python2.5': ["simplejson >= 3.6.5"]}
)
Bump version to 1.9.22.1 and depend on the CloudStorageClient 1.9.22.1
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing Pipeline lib."""
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEnginePipeline",
version="1.9.22.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine pipeline data processing",
url="https://github.com/GoogleCloudPlatform/appengine-pipelines",
license="Apache License 2.0",
description=("Enable asynchronous pipeline style data processing on "
"App Engine"),
zip_safe=True,
include_package_data=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.22",
],
extras_require={'python2.5': ["simplejson >= 3.6.5"]}
)
|
<commit_before>#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing Pipeline lib."""
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEnginePipeline",
version="1.9.21.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine pipeline data processing",
url="https://github.com/GoogleCloudPlatform/appengine-pipelines",
license="Apache License 2.0",
description=("Enable asynchronous pipeline style data processing on "
"App Engine"),
zip_safe=True,
include_package_data=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.21",
],
extras_require={'python2.5': ["simplejson >= 3.6.5"]}
)
<commit_msg>Bump version to 1.9.22.1 and depend on the CloudStorageClient 1.9.22.1<commit_after>
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing Pipeline lib."""
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEnginePipeline",
version="1.9.22.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine pipeline data processing",
url="https://github.com/GoogleCloudPlatform/appengine-pipelines",
license="Apache License 2.0",
description=("Enable asynchronous pipeline style data processing on "
"App Engine"),
zip_safe=True,
include_package_data=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.22",
],
extras_require={'python2.5': ["simplejson >= 3.6.5"]}
)
|
#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing Pipeline lib."""
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEnginePipeline",
version="1.9.21.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine pipeline data processing",
url="https://github.com/GoogleCloudPlatform/appengine-pipelines",
license="Apache License 2.0",
description=("Enable asynchronous pipeline style data processing on "
"App Engine"),
zip_safe=True,
include_package_data=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.21",
],
extras_require={'python2.5': ["simplejson >= 3.6.5"]}
)
Bump version to 1.9.22.1 and depend on the CloudStorageClient 1.9.22.1#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing Pipeline lib."""
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEnginePipeline",
version="1.9.22.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine pipeline data processing",
url="https://github.com/GoogleCloudPlatform/appengine-pipelines",
license="Apache License 2.0",
description=("Enable asynchronous pipeline style data processing on "
"App Engine"),
zip_safe=True,
include_package_data=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.22",
],
extras_require={'python2.5': ["simplejson >= 3.6.5"]}
)
|
<commit_before>#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing Pipeline lib."""
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEnginePipeline",
version="1.9.21.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine pipeline data processing",
url="https://github.com/GoogleCloudPlatform/appengine-pipelines",
license="Apache License 2.0",
description=("Enable asynchronous pipeline style data processing on "
"App Engine"),
zip_safe=True,
include_package_data=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.21",
],
extras_require={'python2.5': ["simplejson >= 3.6.5"]}
)
<commit_msg>Bump version to 1.9.22.1 and depend on the CloudStorageClient 1.9.22.1<commit_after>#!/usr/bin/env python
"""Setup specs for packaging, distributing, and installing Pipeline lib."""
import setuptools
# To debug, set DISTUTILS_DEBUG env var to anything.
setuptools.setup(
name="GoogleAppEnginePipeline",
version="1.9.22.1",
packages=setuptools.find_packages(),
author="Google App Engine",
author_email="app-engine-pipeline-api@googlegroups.com",
keywords="google app engine pipeline data processing",
url="https://github.com/GoogleCloudPlatform/appengine-pipelines",
license="Apache License 2.0",
description=("Enable asynchronous pipeline style data processing on "
"App Engine"),
zip_safe=True,
include_package_data=True,
# Exclude these files from installation.
exclude_package_data={"": ["README"]},
install_requires=[
"GoogleAppEngineCloudStorageClient >= 1.9.22",
],
extras_require={'python2.5': ["simplejson >= 3.6.5"]}
)
|
948dc7e7e6d54e4f4e41288ab014cc2e0aa53e98
|
scraper.py
|
scraper.py
|
"""
Interface for web scraping
"""
import os
import sys
from selenium import webdriver
def scrape():
browser = webdriver.Chrome('/home/lowercase/Desktop/scheduler/chromedriver')
if __name__ == "__main__":
scrape()
|
"""
Interface for web scraping
"""
import os
import sys
import getpass
from selenium import webdriver
def scrape():
browser = webdriver.Chrome('/home/lowercase/Desktop/scheduler/chromedriver')
browser.get('https://my.unt.edu/psp/papd01/EMPLOYEE/EMPL/h/?tab=NTPA_GUEST')
euid = input('What is your EUID?')
password = getpass.getpass('What is your password?')
euid_field = browser.find_element_by_name('userid')
password_field = browser.find_element_by_name('pwd')
euid_field.send_keys(euid)
password_field.send_keys(password)
if __name__ == "__main__":
scrape()
|
Load sign in page and collect password.
|
Load sign in page and collect password.
|
Python
|
mit
|
undercase/scheduler
|
"""
Interface for web scraping
"""
import os
import sys
from selenium import webdriver
def scrape():
browser = webdriver.Chrome('/home/lowercase/Desktop/scheduler/chromedriver')
if __name__ == "__main__":
scrape()
Load sign in page and collect password.
|
"""
Interface for web scraping
"""
import os
import sys
import getpass
from selenium import webdriver
def scrape():
browser = webdriver.Chrome('/home/lowercase/Desktop/scheduler/chromedriver')
browser.get('https://my.unt.edu/psp/papd01/EMPLOYEE/EMPL/h/?tab=NTPA_GUEST')
euid = input('What is your EUID?')
password = getpass.getpass('What is your password?')
euid_field = browser.find_element_by_name('userid')
password_field = browser.find_element_by_name('pwd')
euid_field.send_keys(euid)
password_field.send_keys(password)
if __name__ == "__main__":
scrape()
|
<commit_before>"""
Interface for web scraping
"""
import os
import sys
from selenium import webdriver
def scrape():
browser = webdriver.Chrome('/home/lowercase/Desktop/scheduler/chromedriver')
if __name__ == "__main__":
scrape()
<commit_msg>Load sign in page and collect password.<commit_after>
|
"""
Interface for web scraping
"""
import os
import sys
import getpass
from selenium import webdriver
def scrape():
browser = webdriver.Chrome('/home/lowercase/Desktop/scheduler/chromedriver')
browser.get('https://my.unt.edu/psp/papd01/EMPLOYEE/EMPL/h/?tab=NTPA_GUEST')
euid = input('What is your EUID?')
password = getpass.getpass('What is your password?')
euid_field = browser.find_element_by_name('userid')
password_field = browser.find_element_by_name('pwd')
euid_field.send_keys(euid)
password_field.send_keys(password)
if __name__ == "__main__":
scrape()
|
"""
Interface for web scraping
"""
import os
import sys
from selenium import webdriver
def scrape():
browser = webdriver.Chrome('/home/lowercase/Desktop/scheduler/chromedriver')
if __name__ == "__main__":
scrape()
Load sign in page and collect password."""
Interface for web scraping
"""
import os
import sys
import getpass
from selenium import webdriver
def scrape():
browser = webdriver.Chrome('/home/lowercase/Desktop/scheduler/chromedriver')
browser.get('https://my.unt.edu/psp/papd01/EMPLOYEE/EMPL/h/?tab=NTPA_GUEST')
euid = input('What is your EUID?')
password = getpass.getpass('What is your password?')
euid_field = browser.find_element_by_name('userid')
password_field = browser.find_element_by_name('pwd')
euid_field.send_keys(euid)
password_field.send_keys(password)
if __name__ == "__main__":
scrape()
|
<commit_before>"""
Interface for web scraping
"""
import os
import sys
from selenium import webdriver
def scrape():
browser = webdriver.Chrome('/home/lowercase/Desktop/scheduler/chromedriver')
if __name__ == "__main__":
scrape()
<commit_msg>Load sign in page and collect password.<commit_after>"""
Interface for web scraping
"""
import os
import sys
import getpass
from selenium import webdriver
def scrape():
browser = webdriver.Chrome('/home/lowercase/Desktop/scheduler/chromedriver')
browser.get('https://my.unt.edu/psp/papd01/EMPLOYEE/EMPL/h/?tab=NTPA_GUEST')
euid = input('What is your EUID?')
password = getpass.getpass('What is your password?')
euid_field = browser.find_element_by_name('userid')
password_field = browser.find_element_by_name('pwd')
euid_field.send_keys(euid)
password_field.send_keys(password)
if __name__ == "__main__":
scrape()
|
fc87264fec2b13afb04fb89bfc7b2d4bbe2debdf
|
src/arc_utilities/ros_helpers.py
|
src/arc_utilities/ros_helpers.py
|
#! /usr/bin/env python
import rospy
from threading import Lock
class Listener:
def __init__(self, topic_name, topic_type, lock=None):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
lock (Lock): optional lock object used when setting stored data
"""
self.data = None
self.lock = lock
if self.lock is None:
self.lock = Lock()
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self):
"""
Returns the latest msg from the subscribed topic
"""
with self.lock:
return self.data
|
#! /usr/bin/env python
import rospy
from threading import Lock
class Listener:
def __init__(self, topic_name, topic_type):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Listener does not consume the message
(for consuming behavior, use the standard ros callback pattern)
Listener does not check timestamps of message headers
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
lock (Lock): optional lock object used when setting stored data
"""
self.data = None
self.lock = Lock()
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self):
"""
Returns the latest msg from the subscribed topic
"""
with self.lock:
return self.data
|
Remove optional lock input (I can't see when it would be useful) Document when Listener should be used
|
Remove optional lock input (I can't see when it would be useful)
Document when Listener should be used
|
Python
|
bsd-2-clause
|
WPI-ARC/arc_utilities,UM-ARM-Lab/arc_utilities,UM-ARM-Lab/arc_utilities,WPI-ARC/arc_utilities,UM-ARM-Lab/arc_utilities,WPI-ARC/arc_utilities
|
#! /usr/bin/env python
import rospy
from threading import Lock
class Listener:
def __init__(self, topic_name, topic_type, lock=None):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
lock (Lock): optional lock object used when setting stored data
"""
self.data = None
self.lock = lock
if self.lock is None:
self.lock = Lock()
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self):
"""
Returns the latest msg from the subscribed topic
"""
with self.lock:
return self.data
Remove optional lock input (I can't see when it would be useful)
Document when Listener should be used
|
#! /usr/bin/env python
import rospy
from threading import Lock
class Listener:
def __init__(self, topic_name, topic_type):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Listener does not consume the message
(for consuming behavior, use the standard ros callback pattern)
Listener does not check timestamps of message headers
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
lock (Lock): optional lock object used when setting stored data
"""
self.data = None
self.lock = Lock()
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self):
"""
Returns the latest msg from the subscribed topic
"""
with self.lock:
return self.data
|
<commit_before>#! /usr/bin/env python
import rospy
from threading import Lock
class Listener:
def __init__(self, topic_name, topic_type, lock=None):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
lock (Lock): optional lock object used when setting stored data
"""
self.data = None
self.lock = lock
if self.lock is None:
self.lock = Lock()
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self):
"""
Returns the latest msg from the subscribed topic
"""
with self.lock:
return self.data
<commit_msg>Remove optional lock input (I can't see when it would be useful)
Document when Listener should be used<commit_after>
|
#! /usr/bin/env python
import rospy
from threading import Lock
class Listener:
def __init__(self, topic_name, topic_type):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Listener does not consume the message
(for consuming behavior, use the standard ros callback pattern)
Listener does not check timestamps of message headers
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
lock (Lock): optional lock object used when setting stored data
"""
self.data = None
self.lock = Lock()
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self):
"""
Returns the latest msg from the subscribed topic
"""
with self.lock:
return self.data
|
#! /usr/bin/env python
import rospy
from threading import Lock
class Listener:
def __init__(self, topic_name, topic_type, lock=None):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
lock (Lock): optional lock object used when setting stored data
"""
self.data = None
self.lock = lock
if self.lock is None:
self.lock = Lock()
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self):
"""
Returns the latest msg from the subscribed topic
"""
with self.lock:
return self.data
Remove optional lock input (I can't see when it would be useful)
Document when Listener should be used#! /usr/bin/env python
import rospy
from threading import Lock
class Listener:
def __init__(self, topic_name, topic_type):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Listener does not consume the message
(for consuming behavior, use the standard ros callback pattern)
Listener does not check timestamps of message headers
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
lock (Lock): optional lock object used when setting stored data
"""
self.data = None
self.lock = Lock()
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self):
"""
Returns the latest msg from the subscribed topic
"""
with self.lock:
return self.data
|
<commit_before>#! /usr/bin/env python
import rospy
from threading import Lock
class Listener:
def __init__(self, topic_name, topic_type, lock=None):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
lock (Lock): optional lock object used when setting stored data
"""
self.data = None
self.lock = lock
if self.lock is None:
self.lock = Lock()
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self):
"""
Returns the latest msg from the subscribed topic
"""
with self.lock:
return self.data
<commit_msg>Remove optional lock input (I can't see when it would be useful)
Document when Listener should be used<commit_after>#! /usr/bin/env python
import rospy
from threading import Lock
class Listener:
def __init__(self, topic_name, topic_type):
"""
Listener is a wrapper around a subscriber where the callback simply records the latest msg.
Listener does not consume the message
(for consuming behavior, use the standard ros callback pattern)
Listener does not check timestamps of message headers
Parameters:
topic_name (str): name of topic to subscribe to
topic_type (msg_type): type of message received on topic
lock (Lock): optional lock object used when setting stored data
"""
self.data = None
self.lock = Lock()
self.subscriber = rospy.Subscriber(topic_name, topic_type, self.callback)
def callback(self, msg):
with self.lock:
self.data = msg
def get(self):
"""
Returns the latest msg from the subscribed topic
"""
with self.lock:
return self.data
|
cc3f363c1fefb758302b82e7d0ddff69d55a8261
|
recognition/scrobbleTrack.py
|
recognition/scrobbleTrack.py
|
import os, sys, json, pylast, calendar
from datetime import datetime
resultJson = json.loads(sys.stdin.read())
# exit immediately if recognition failed
if resultJson["status"]["msg"] != "Success":
print "Recognition failed."
sys.exit(2)
# load Last.fm auth details into environment variables
apiKey = os.environ["LASTFM_API_KEY"]
apiSecret = os.environ["LASTFM_API_SECRET"]
username = os.environ["LASTFM_USERNAME"]
password = os.environ["LASTFM_PASSWORD"]
passwordHash = pylast.md5(password)
# load song details from JSON object
songName = resultJson["metadata"]["music"][0]["title"]
songArtist = resultJson["metadata"]["music"][0]["artists"][0]["name"]
network = pylast.LastFMNetwork(api_key=apiKey, api_secret=apiSecret,
username=username, password_hash=passwordHash)
d = datetime.utcnow()
unixtime = calendar.timegm(d.utctimetuple())
network.scrobble(songArtist, songName, unixtime)
|
import os, sys, json, pylast, calendar
from datetime import datetime
resultJson = json.loads(sys.stdin.read())
# exit immediately if recognition failed
if resultJson["status"]["msg"] != "Success":
print "Recognition failed."
sys.exit(2)
# load Last.fm auth details into environment variables
apiKey = os.environ["LASTFM_API_KEY"]
apiSecret = os.environ["LASTFM_API_SECRET"]
username = os.environ["LASTFM_USERNAME"]
password = os.environ["LASTFM_PASSWORD"]
passwordHash = pylast.md5(password)
if not apiKey:
print "No Last.fm API Key was found."
sys.exit(3)
if not apiSecret:
print "No Last.fm API Secret was found."
sys.exit(3)
if not username:
print "No Last.fm username was found."
sys.exit(3)
if not password:
print "No Last.fm password was found."
sys.exit(3)
# load song details from JSON object
songName = resultJson["metadata"]["music"][0]["title"]
songArtist = resultJson["metadata"]["music"][0]["artists"][0]["name"]
network = pylast.LastFMNetwork(api_key=apiKey, api_secret=apiSecret,
username=username, password_hash=passwordHash)
d = datetime.utcnow()
unixtime = calendar.timegm(d.utctimetuple())
network.scrobble(songArtist, songName, unixtime)
|
Check for existing Last.fm creds before scrobbling
|
Check for existing Last.fm creds before scrobbling
|
Python
|
mit
|
jeffstephens/pi-resto,jeffstephens/pi-resto
|
import os, sys, json, pylast, calendar
from datetime import datetime
resultJson = json.loads(sys.stdin.read())
# exit immediately if recognition failed
if resultJson["status"]["msg"] != "Success":
print "Recognition failed."
sys.exit(2)
# load Last.fm auth details into environment variables
apiKey = os.environ["LASTFM_API_KEY"]
apiSecret = os.environ["LASTFM_API_SECRET"]
username = os.environ["LASTFM_USERNAME"]
password = os.environ["LASTFM_PASSWORD"]
passwordHash = pylast.md5(password)
# load song details from JSON object
songName = resultJson["metadata"]["music"][0]["title"]
songArtist = resultJson["metadata"]["music"][0]["artists"][0]["name"]
network = pylast.LastFMNetwork(api_key=apiKey, api_secret=apiSecret,
username=username, password_hash=passwordHash)
d = datetime.utcnow()
unixtime = calendar.timegm(d.utctimetuple())
network.scrobble(songArtist, songName, unixtime)
Check for existing Last.fm creds before scrobbling
|
import os, sys, json, pylast, calendar
from datetime import datetime
resultJson = json.loads(sys.stdin.read())
# exit immediately if recognition failed
if resultJson["status"]["msg"] != "Success":
print "Recognition failed."
sys.exit(2)
# load Last.fm auth details into environment variables
apiKey = os.environ["LASTFM_API_KEY"]
apiSecret = os.environ["LASTFM_API_SECRET"]
username = os.environ["LASTFM_USERNAME"]
password = os.environ["LASTFM_PASSWORD"]
passwordHash = pylast.md5(password)
if not apiKey:
print "No Last.fm API Key was found."
sys.exit(3)
if not apiSecret:
print "No Last.fm API Secret was found."
sys.exit(3)
if not username:
print "No Last.fm username was found."
sys.exit(3)
if not password:
print "No Last.fm password was found."
sys.exit(3)
# load song details from JSON object
songName = resultJson["metadata"]["music"][0]["title"]
songArtist = resultJson["metadata"]["music"][0]["artists"][0]["name"]
network = pylast.LastFMNetwork(api_key=apiKey, api_secret=apiSecret,
username=username, password_hash=passwordHash)
d = datetime.utcnow()
unixtime = calendar.timegm(d.utctimetuple())
network.scrobble(songArtist, songName, unixtime)
|
<commit_before>import os, sys, json, pylast, calendar
from datetime import datetime
resultJson = json.loads(sys.stdin.read())
# exit immediately if recognition failed
if resultJson["status"]["msg"] != "Success":
print "Recognition failed."
sys.exit(2)
# load Last.fm auth details into environment variables
apiKey = os.environ["LASTFM_API_KEY"]
apiSecret = os.environ["LASTFM_API_SECRET"]
username = os.environ["LASTFM_USERNAME"]
password = os.environ["LASTFM_PASSWORD"]
passwordHash = pylast.md5(password)
# load song details from JSON object
songName = resultJson["metadata"]["music"][0]["title"]
songArtist = resultJson["metadata"]["music"][0]["artists"][0]["name"]
network = pylast.LastFMNetwork(api_key=apiKey, api_secret=apiSecret,
username=username, password_hash=passwordHash)
d = datetime.utcnow()
unixtime = calendar.timegm(d.utctimetuple())
network.scrobble(songArtist, songName, unixtime)
<commit_msg>Check for existing Last.fm creds before scrobbling<commit_after>
|
import os, sys, json, pylast, calendar
from datetime import datetime
resultJson = json.loads(sys.stdin.read())
# exit immediately if recognition failed
if resultJson["status"]["msg"] != "Success":
print "Recognition failed."
sys.exit(2)
# load Last.fm auth details into environment variables
apiKey = os.environ["LASTFM_API_KEY"]
apiSecret = os.environ["LASTFM_API_SECRET"]
username = os.environ["LASTFM_USERNAME"]
password = os.environ["LASTFM_PASSWORD"]
passwordHash = pylast.md5(password)
if not apiKey:
print "No Last.fm API Key was found."
sys.exit(3)
if not apiSecret:
print "No Last.fm API Secret was found."
sys.exit(3)
if not username:
print "No Last.fm username was found."
sys.exit(3)
if not password:
print "No Last.fm password was found."
sys.exit(3)
# load song details from JSON object
songName = resultJson["metadata"]["music"][0]["title"]
songArtist = resultJson["metadata"]["music"][0]["artists"][0]["name"]
network = pylast.LastFMNetwork(api_key=apiKey, api_secret=apiSecret,
username=username, password_hash=passwordHash)
d = datetime.utcnow()
unixtime = calendar.timegm(d.utctimetuple())
network.scrobble(songArtist, songName, unixtime)
|
import os, sys, json, pylast, calendar
from datetime import datetime
resultJson = json.loads(sys.stdin.read())
# exit immediately if recognition failed
if resultJson["status"]["msg"] != "Success":
print "Recognition failed."
sys.exit(2)
# load Last.fm auth details into environment variables
apiKey = os.environ["LASTFM_API_KEY"]
apiSecret = os.environ["LASTFM_API_SECRET"]
username = os.environ["LASTFM_USERNAME"]
password = os.environ["LASTFM_PASSWORD"]
passwordHash = pylast.md5(password)
# load song details from JSON object
songName = resultJson["metadata"]["music"][0]["title"]
songArtist = resultJson["metadata"]["music"][0]["artists"][0]["name"]
network = pylast.LastFMNetwork(api_key=apiKey, api_secret=apiSecret,
username=username, password_hash=passwordHash)
d = datetime.utcnow()
unixtime = calendar.timegm(d.utctimetuple())
network.scrobble(songArtist, songName, unixtime)
Check for existing Last.fm creds before scrobblingimport os, sys, json, pylast, calendar
from datetime import datetime
resultJson = json.loads(sys.stdin.read())
# exit immediately if recognition failed
if resultJson["status"]["msg"] != "Success":
print "Recognition failed."
sys.exit(2)
# load Last.fm auth details into environment variables
apiKey = os.environ["LASTFM_API_KEY"]
apiSecret = os.environ["LASTFM_API_SECRET"]
username = os.environ["LASTFM_USERNAME"]
password = os.environ["LASTFM_PASSWORD"]
passwordHash = pylast.md5(password)
if not apiKey:
print "No Last.fm API Key was found."
sys.exit(3)
if not apiSecret:
print "No Last.fm API Secret was found."
sys.exit(3)
if not username:
print "No Last.fm username was found."
sys.exit(3)
if not password:
print "No Last.fm password was found."
sys.exit(3)
# load song details from JSON object
songName = resultJson["metadata"]["music"][0]["title"]
songArtist = resultJson["metadata"]["music"][0]["artists"][0]["name"]
network = pylast.LastFMNetwork(api_key=apiKey, api_secret=apiSecret,
username=username, password_hash=passwordHash)
d = datetime.utcnow()
unixtime = calendar.timegm(d.utctimetuple())
network.scrobble(songArtist, songName, unixtime)
|
<commit_before>import os, sys, json, pylast, calendar
from datetime import datetime
resultJson = json.loads(sys.stdin.read())
# exit immediately if recognition failed
if resultJson["status"]["msg"] != "Success":
print "Recognition failed."
sys.exit(2)
# load Last.fm auth details into environment variables
apiKey = os.environ["LASTFM_API_KEY"]
apiSecret = os.environ["LASTFM_API_SECRET"]
username = os.environ["LASTFM_USERNAME"]
password = os.environ["LASTFM_PASSWORD"]
passwordHash = pylast.md5(password)
# load song details from JSON object
songName = resultJson["metadata"]["music"][0]["title"]
songArtist = resultJson["metadata"]["music"][0]["artists"][0]["name"]
network = pylast.LastFMNetwork(api_key=apiKey, api_secret=apiSecret,
username=username, password_hash=passwordHash)
d = datetime.utcnow()
unixtime = calendar.timegm(d.utctimetuple())
network.scrobble(songArtist, songName, unixtime)
<commit_msg>Check for existing Last.fm creds before scrobbling<commit_after>import os, sys, json, pylast, calendar
from datetime import datetime
resultJson = json.loads(sys.stdin.read())
# exit immediately if recognition failed
if resultJson["status"]["msg"] != "Success":
print "Recognition failed."
sys.exit(2)
# load Last.fm auth details into environment variables
apiKey = os.environ["LASTFM_API_KEY"]
apiSecret = os.environ["LASTFM_API_SECRET"]
username = os.environ["LASTFM_USERNAME"]
password = os.environ["LASTFM_PASSWORD"]
passwordHash = pylast.md5(password)
if not apiKey:
print "No Last.fm API Key was found."
sys.exit(3)
if not apiSecret:
print "No Last.fm API Secret was found."
sys.exit(3)
if not username:
print "No Last.fm username was found."
sys.exit(3)
if not password:
print "No Last.fm password was found."
sys.exit(3)
# load song details from JSON object
songName = resultJson["metadata"]["music"][0]["title"]
songArtist = resultJson["metadata"]["music"][0]["artists"][0]["name"]
network = pylast.LastFMNetwork(api_key=apiKey, api_secret=apiSecret,
username=username, password_hash=passwordHash)
d = datetime.utcnow()
unixtime = calendar.timegm(d.utctimetuple())
network.scrobble(songArtist, songName, unixtime)
|
4c9e5df1bd52b0bad6fcfb2ac599999a00c8f413
|
__init__.py
|
__init__.py
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import sys
__version__ = "%d.%d.%d" % sys.version_info[:3]
del sys
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
# Distutils version
#
# Please coordinate with Marc-Andre Lemburg <mal@egenix.com> when adding
# new features to distutils that would warrant bumping the version number.
#
# In general, major and minor version should loosely follow the Python
# version number the distutils code was shipped with.
#
__version__ = "2.5.0"
|
Revert to having static version numbers again.
|
Revert to having static version numbers again.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import sys
__version__ = "%d.%d.%d" % sys.version_info[:3]
del sys
Revert to having static version numbers again.
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
# Distutils version
#
# Please coordinate with Marc-Andre Lemburg <mal@egenix.com> when adding
# new features to distutils that would warrant bumping the version number.
#
# In general, major and minor version should loosely follow the Python
# version number the distutils code was shipped with.
#
__version__ = "2.5.0"
|
<commit_before>"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import sys
__version__ = "%d.%d.%d" % sys.version_info[:3]
del sys
<commit_msg>Revert to having static version numbers again.<commit_after>
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
# Distutils version
#
# Please coordinate with Marc-Andre Lemburg <mal@egenix.com> when adding
# new features to distutils that would warrant bumping the version number.
#
# In general, major and minor version should loosely follow the Python
# version number the distutils code was shipped with.
#
__version__ = "2.5.0"
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import sys
__version__ = "%d.%d.%d" % sys.version_info[:3]
del sys
Revert to having static version numbers again."""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
# Distutils version
#
# Please coordinate with Marc-Andre Lemburg <mal@egenix.com> when adding
# new features to distutils that would warrant bumping the version number.
#
# In general, major and minor version should loosely follow the Python
# version number the distutils code was shipped with.
#
__version__ = "2.5.0"
|
<commit_before>"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
import sys
__version__ = "%d.%d.%d" % sys.version_info[:3]
del sys
<commit_msg>Revert to having static version numbers again.<commit_after>"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id$"
# Distutils version
#
# Please coordinate with Marc-Andre Lemburg <mal@egenix.com> when adding
# new features to distutils that would warrant bumping the version number.
#
# In general, major and minor version should loosely follow the Python
# version number the distutils code was shipped with.
#
__version__ = "2.5.0"
|
ca584c5ddf942d867585de00ee786101d8ab7438
|
playa/web/templatetags.py
|
playa/web/templatetags.py
|
import urllib
from playa import app
@app.template_filter('duration')
def duration(seconds):
return '%s:%s' % (int(seconds / 60), ('0' + str(int(seconds % 60)))[-2:])
@app.template_filter('song_title')
def song_title(metadata):
if 'artist' in metadata:
return '%s - %s' % (metdata['artist'], metadata['title'])
return metadata['title']
app.template_filter('urlquote')(urllib.quote)
|
import urllib
from playa import app
@app.template_filter('duration')
def duration(seconds):
return '%s:%s' % (int(seconds / 60), ('0' + str(int(seconds % 60)))[-2:])
@app.template_filter('song_title')
def song_title(metadata):
if 'artist' in metadata:
return '%s - %s' % (metadata['artist'], metadata['title'])
return metadata['title']
app.template_filter('urlquote')(urllib.quote)
|
Correct bug with metadata ref
|
Correct bug with metadata ref
|
Python
|
apache-2.0
|
disqus/playa,disqus/playa
|
import urllib
from playa import app
@app.template_filter('duration')
def duration(seconds):
return '%s:%s' % (int(seconds / 60), ('0' + str(int(seconds % 60)))[-2:])
@app.template_filter('song_title')
def song_title(metadata):
if 'artist' in metadata:
return '%s - %s' % (metdata['artist'], metadata['title'])
return metadata['title']
app.template_filter('urlquote')(urllib.quote)Correct bug with metadata ref
|
import urllib
from playa import app
@app.template_filter('duration')
def duration(seconds):
return '%s:%s' % (int(seconds / 60), ('0' + str(int(seconds % 60)))[-2:])
@app.template_filter('song_title')
def song_title(metadata):
if 'artist' in metadata:
return '%s - %s' % (metadata['artist'], metadata['title'])
return metadata['title']
app.template_filter('urlquote')(urllib.quote)
|
<commit_before>import urllib
from playa import app
@app.template_filter('duration')
def duration(seconds):
return '%s:%s' % (int(seconds / 60), ('0' + str(int(seconds % 60)))[-2:])
@app.template_filter('song_title')
def song_title(metadata):
if 'artist' in metadata:
return '%s - %s' % (metdata['artist'], metadata['title'])
return metadata['title']
app.template_filter('urlquote')(urllib.quote)<commit_msg>Correct bug with metadata ref<commit_after>
|
import urllib
from playa import app
@app.template_filter('duration')
def duration(seconds):
return '%s:%s' % (int(seconds / 60), ('0' + str(int(seconds % 60)))[-2:])
@app.template_filter('song_title')
def song_title(metadata):
if 'artist' in metadata:
return '%s - %s' % (metadata['artist'], metadata['title'])
return metadata['title']
app.template_filter('urlquote')(urllib.quote)
|
import urllib
from playa import app
@app.template_filter('duration')
def duration(seconds):
return '%s:%s' % (int(seconds / 60), ('0' + str(int(seconds % 60)))[-2:])
@app.template_filter('song_title')
def song_title(metadata):
if 'artist' in metadata:
return '%s - %s' % (metdata['artist'], metadata['title'])
return metadata['title']
app.template_filter('urlquote')(urllib.quote)Correct bug with metadata refimport urllib
from playa import app
@app.template_filter('duration')
def duration(seconds):
return '%s:%s' % (int(seconds / 60), ('0' + str(int(seconds % 60)))[-2:])
@app.template_filter('song_title')
def song_title(metadata):
if 'artist' in metadata:
return '%s - %s' % (metadata['artist'], metadata['title'])
return metadata['title']
app.template_filter('urlquote')(urllib.quote)
|
<commit_before>import urllib
from playa import app
@app.template_filter('duration')
def duration(seconds):
return '%s:%s' % (int(seconds / 60), ('0' + str(int(seconds % 60)))[-2:])
@app.template_filter('song_title')
def song_title(metadata):
if 'artist' in metadata:
return '%s - %s' % (metdata['artist'], metadata['title'])
return metadata['title']
app.template_filter('urlquote')(urllib.quote)<commit_msg>Correct bug with metadata ref<commit_after>import urllib
from playa import app
@app.template_filter('duration')
def duration(seconds):
return '%s:%s' % (int(seconds / 60), ('0' + str(int(seconds % 60)))[-2:])
@app.template_filter('song_title')
def song_title(metadata):
if 'artist' in metadata:
return '%s - %s' % (metadata['artist'], metadata['title'])
return metadata['title']
app.template_filter('urlquote')(urllib.quote)
|
e6210531dac1d7efd5fd4d343dcac74a0b74515e
|
request_profiler/settings.py
|
request_profiler/settings.py
|
# -*- coding: utf-8 -*-
# models definitions for request_profiler
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC', lambda r: True)
|
# -*- coding: utf-8 -*-
# models definitions for request_profiler
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(
settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC',
lambda r: not (hasattr(r, 'user') and r.user.is_staff)
)
|
Update GLOBAL_EXCLUDE_FUNC default to exclude admins
|
Update GLOBAL_EXCLUDE_FUNC default to exclude admins
|
Python
|
mit
|
yunojuno/django-request-profiler,yunojuno/django-request-profiler,sigshen/django-request-profiler,sigshen/django-request-profiler
|
# -*- coding: utf-8 -*-
# models definitions for request_profiler
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC', lambda r: True)
Update GLOBAL_EXCLUDE_FUNC default to exclude admins
|
# -*- coding: utf-8 -*-
# models definitions for request_profiler
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(
settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC',
lambda r: not (hasattr(r, 'user') and r.user.is_staff)
)
|
<commit_before># -*- coding: utf-8 -*-
# models definitions for request_profiler
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC', lambda r: True)
<commit_msg>Update GLOBAL_EXCLUDE_FUNC default to exclude admins<commit_after>
|
# -*- coding: utf-8 -*-
# models definitions for request_profiler
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(
settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC',
lambda r: not (hasattr(r, 'user') and r.user.is_staff)
)
|
# -*- coding: utf-8 -*-
# models definitions for request_profiler
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC', lambda r: True)
Update GLOBAL_EXCLUDE_FUNC default to exclude admins# -*- coding: utf-8 -*-
# models definitions for request_profiler
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(
settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC',
lambda r: not (hasattr(r, 'user') and r.user.is_staff)
)
|
<commit_before># -*- coding: utf-8 -*-
# models definitions for request_profiler
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC', lambda r: True)
<commit_msg>Update GLOBAL_EXCLUDE_FUNC default to exclude admins<commit_after># -*- coding: utf-8 -*-
# models definitions for request_profiler
from django.conf import settings
# cache key used to store enabled rulesets.
RULESET_CACHE_KEY = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_KEY', "request_profiler__rulesets") # noqa
# how long to cache them for - defaults to 10s
RULESET_CACHE_TIMEOUT = getattr(settings, 'REQUEST_PROFILER_RULESET_CACHE_TIMEOUT', 10) # noqa
# This is a function that can be used to override all rules to exclude requests from profiling
# e.g. you can use this to ignore staff, or search engine bots, etc.
GLOBAL_EXCLUDE_FUNC = getattr(
settings, 'REQUEST_PROFILER_GLOBAL_EXCLUDE_FUNC',
lambda r: not (hasattr(r, 'user') and r.user.is_staff)
)
|
8e202175767660bd90c4a894953d2553eec1a1d3
|
pythonx/completers/common/__init__.py
|
pythonx/completers/common/__init__.py
|
# -*- coding: utf-8 -*-
import completor
import itertools
import re
from completor.compat import text_type
from .filename import Filename # noqa
from .buffer import Buffer # noqa
from .omni import Omni # noqa
try:
from UltiSnips import UltiSnips_Manager # noqa
from .ultisnips import Ultisnips # noqa
except ImportError:
pass
word = re.compile(r'[^\W\d]\w*$', re.U)
class Common(completor.Completor):
filetype = 'common'
sync = True
def completions(self, completer, base):
com = completor.get(completer)
if not com:
return []
com.ft = self.ft
if com.disabled:
return []
return com.parse(base)
def parse(self, base):
if not isinstance(base, text_type):
return []
match = word.search(base)
if not match:
return []
base = match.group()
if len(base) < self.get_option('min_chars'):
return []
return list(itertools.chain(
*[self.completions(n, base) for n in ('ultisnips', 'buffer')]))
|
# -*- coding: utf-8 -*-
import completor
import itertools
import re
from completor.compat import text_type
from .filename import Filename # noqa
from .buffer import Buffer # noqa
from .omni import Omni # noqa
try:
from UltiSnips import UltiSnips_Manager # noqa
from .ultisnips import Ultisnips # noqa
except ImportError:
pass
word = re.compile(r'[^\W\d]\w*$', re.U)
class Common(completor.Completor):
filetype = 'common'
sync = True
hooks = ['ultisnips', 'buffer']
def completions(self, completer, base):
com = completor.get(completer)
if not com:
return []
com.ft = self.ft
if com.disabled:
return []
return com.parse(base)
def parse(self, base):
if not isinstance(base, text_type):
return []
match = word.search(base)
if not match:
return []
base = match.group()
if len(base) < self.get_option('min_chars'):
return []
return list(itertools.chain(
*[self.completions(n, base) for n in self.hooks]))
|
Make it possible to extend common completions
|
Make it possible to extend common completions
|
Python
|
mit
|
maralla/completor.vim,maralla/completor.vim
|
# -*- coding: utf-8 -*-
import completor
import itertools
import re
from completor.compat import text_type
from .filename import Filename # noqa
from .buffer import Buffer # noqa
from .omni import Omni # noqa
try:
from UltiSnips import UltiSnips_Manager # noqa
from .ultisnips import Ultisnips # noqa
except ImportError:
pass
word = re.compile(r'[^\W\d]\w*$', re.U)
class Common(completor.Completor):
filetype = 'common'
sync = True
def completions(self, completer, base):
com = completor.get(completer)
if not com:
return []
com.ft = self.ft
if com.disabled:
return []
return com.parse(base)
def parse(self, base):
if not isinstance(base, text_type):
return []
match = word.search(base)
if not match:
return []
base = match.group()
if len(base) < self.get_option('min_chars'):
return []
return list(itertools.chain(
*[self.completions(n, base) for n in ('ultisnips', 'buffer')]))
Make it possible to extend common completions
|
# -*- coding: utf-8 -*-
import completor
import itertools
import re
from completor.compat import text_type
from .filename import Filename # noqa
from .buffer import Buffer # noqa
from .omni import Omni # noqa
try:
from UltiSnips import UltiSnips_Manager # noqa
from .ultisnips import Ultisnips # noqa
except ImportError:
pass
word = re.compile(r'[^\W\d]\w*$', re.U)
class Common(completor.Completor):
    """Aggregate completer that merges results from several sources."""
    # Registered under the special 'common' name (applies to all filetypes).
    filetype = 'common'
    # Results are produced synchronously (no async job).
    sync = True
    # Completer names queried in order; override or extend to add sources.
    hooks = ['ultisnips', 'buffer']
    def completions(self, completer, base):
        """Return completion items from the completer named *completer*.

        Returns an empty list when the completer is missing or disabled.
        """
        com = completor.get(completer)
        if not com:
            return []
        # Propagate the active filetype so the delegate can filter.
        com.ft = self.ft
        if com.disabled:
            return []
        return com.parse(base)
    def parse(self, base):
        """Return merged completions for the trailing word of *base*."""
        if not isinstance(base, text_type):
            return []
        # Extract the trailing identifier-like word (unicode aware).
        match = word.search(base)
        if not match:
            return []
        base = match.group()
        # Respect the user's minimum trigger length.
        if len(base) < self.get_option('min_chars'):
            return []
        return list(itertools.chain(
            *[self.completions(n, base) for n in self.hooks]))
|
<commit_before># -*- coding: utf-8 -*-
import completor
import itertools
import re
from completor.compat import text_type
from .filename import Filename # noqa
from .buffer import Buffer # noqa
from .omni import Omni # noqa
try:
from UltiSnips import UltiSnips_Manager # noqa
from .ultisnips import Ultisnips # noqa
except ImportError:
pass
word = re.compile(r'[^\W\d]\w*$', re.U)
class Common(completor.Completor):
    """Filetype-agnostic completer combining ultisnips and buffer results."""

    filetype = 'common'
    sync = True

    def completions(self, completer, base):
        """Fetch completion items from the delegate named *completer*."""
        delegate = completor.get(completer)
        if not delegate:
            return []
        delegate.ft = self.ft
        if delegate.disabled:
            return []
        return delegate.parse(base)

    def parse(self, base):
        """Merge ultisnips and buffer completions for the word in *base*."""
        if not isinstance(base, text_type):
            return []
        found = word.search(base)
        if not found:
            return []
        token = found.group()
        if len(token) < self.get_option('min_chars'):
            return []
        return list(itertools.chain.from_iterable(
            self.completions(name, token)
            for name in ('ultisnips', 'buffer')))
<commit_msg>Make it possible to extend common completions<commit_after>
|
# -*- coding: utf-8 -*-
import completor
import itertools
import re
from completor.compat import text_type
from .filename import Filename # noqa
from .buffer import Buffer # noqa
from .omni import Omni # noqa
try:
from UltiSnips import UltiSnips_Manager # noqa
from .ultisnips import Ultisnips # noqa
except ImportError:
pass
word = re.compile(r'[^\W\d]\w*$', re.U)
class Common(completor.Completor):
    """Generic completer that chains results from the configured hooks."""

    filetype = 'common'
    sync = True
    hooks = ['ultisnips', 'buffer']

    def completions(self, completer, base):
        """Delegate to the completer registered under *completer*."""
        delegate = completor.get(completer)
        if not delegate:
            return []
        delegate.ft = self.ft
        return [] if delegate.disabled else delegate.parse(base)

    def parse(self, base):
        """Collect completions for the identifier-like word ending *base*."""
        if not isinstance(base, text_type):
            return []
        matched = word.search(base)
        if not matched:
            return []
        token = matched.group()
        if len(token) < self.get_option('min_chars'):
            return []
        merged = itertools.chain.from_iterable(
            self.completions(name, token) for name in self.hooks)
        return list(merged)
|
# -*- coding: utf-8 -*-
import completor
import itertools
import re
from completor.compat import text_type
from .filename import Filename # noqa
from .buffer import Buffer # noqa
from .omni import Omni # noqa
try:
from UltiSnips import UltiSnips_Manager # noqa
from .ultisnips import Ultisnips # noqa
except ImportError:
pass
word = re.compile(r'[^\W\d]\w*$', re.U)
class Common(completor.Completor):
filetype = 'common'
sync = True
def completions(self, completer, base):
com = completor.get(completer)
if not com:
return []
com.ft = self.ft
if com.disabled:
return []
return com.parse(base)
def parse(self, base):
if not isinstance(base, text_type):
return []
match = word.search(base)
if not match:
return []
base = match.group()
if len(base) < self.get_option('min_chars'):
return []
return list(itertools.chain(
*[self.completions(n, base) for n in ('ultisnips', 'buffer')]))
Make it possible to extend common completions# -*- coding: utf-8 -*-
import completor
import itertools
import re
from completor.compat import text_type
from .filename import Filename # noqa
from .buffer import Buffer # noqa
from .omni import Omni # noqa
try:
from UltiSnips import UltiSnips_Manager # noqa
from .ultisnips import Ultisnips # noqa
except ImportError:
pass
word = re.compile(r'[^\W\d]\w*$', re.U)
class Common(completor.Completor):
filetype = 'common'
sync = True
hooks = ['ultisnips', 'buffer']
def completions(self, completer, base):
com = completor.get(completer)
if not com:
return []
com.ft = self.ft
if com.disabled:
return []
return com.parse(base)
def parse(self, base):
if not isinstance(base, text_type):
return []
match = word.search(base)
if not match:
return []
base = match.group()
if len(base) < self.get_option('min_chars'):
return []
return list(itertools.chain(
*[self.completions(n, base) for n in self.hooks]))
|
<commit_before># -*- coding: utf-8 -*-
import completor
import itertools
import re
from completor.compat import text_type
from .filename import Filename # noqa
from .buffer import Buffer # noqa
from .omni import Omni # noqa
try:
from UltiSnips import UltiSnips_Manager # noqa
from .ultisnips import Ultisnips # noqa
except ImportError:
pass
word = re.compile(r'[^\W\d]\w*$', re.U)
class Common(completor.Completor):
filetype = 'common'
sync = True
def completions(self, completer, base):
com = completor.get(completer)
if not com:
return []
com.ft = self.ft
if com.disabled:
return []
return com.parse(base)
def parse(self, base):
if not isinstance(base, text_type):
return []
match = word.search(base)
if not match:
return []
base = match.group()
if len(base) < self.get_option('min_chars'):
return []
return list(itertools.chain(
*[self.completions(n, base) for n in ('ultisnips', 'buffer')]))
<commit_msg>Make it possible to extend common completions<commit_after># -*- coding: utf-8 -*-
import completor
import itertools
import re
from completor.compat import text_type
from .filename import Filename # noqa
from .buffer import Buffer # noqa
from .omni import Omni # noqa
try:
from UltiSnips import UltiSnips_Manager # noqa
from .ultisnips import Ultisnips # noqa
except ImportError:
pass
word = re.compile(r'[^\W\d]\w*$', re.U)
class Common(completor.Completor):
    """Aggregate completer that merges results from the hook completers."""
    # Registered under the special 'common' name (applies to all filetypes).
    filetype = 'common'
    # Results are produced synchronously (no async job).
    sync = True
    # Completer names queried in order; override or extend to add sources.
    hooks = ['ultisnips', 'buffer']
    def completions(self, completer, base):
        """Return completion items from the completer named *completer*."""
        com = completor.get(completer)
        if not com:
            return []
        # Propagate the active filetype so the delegate can filter.
        com.ft = self.ft
        if com.disabled:
            return []
        return com.parse(base)
    def parse(self, base):
        """Return merged completions for the trailing word of *base*."""
        if not isinstance(base, text_type):
            return []
        match = word.search(base)
        if not match:
            return []
        base = match.group()
        # Respect the user's minimum trigger length.
        if len(base) < self.get_option('min_chars'):
            return []
        return list(itertools.chain(
            *[self.completions(n, base) for n in self.hooks]))
|
536bed6c3ff0a819075a04e14296518f1368cc74
|
rest_framework_json_api/exceptions.py
|
rest_framework_json_api/exceptions.py
|
from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
response = drf_exception_handler(exc, context)
errors = []
# handle generic errors. ValidationError('test') in a view for example
if isinstance(response.data, list):
for message in response.data:
errors.append({
'detail': message,
'source': {
'pointer': '/data',
},
'status': encoding.force_text(response.status_code),
})
# handle all errors thrown from serializers
else:
for field, error in response.data.items():
field = format_value(field)
pointer = '/data/attributes/{}'.format(field)
# see if they passed a dictionary to ValidationError manually
if isinstance(error, dict):
errors.append(error)
else:
for message in error:
errors.append({
'detail': message,
'source': {
'pointer': pointer,
},
'status': encoding.force_text(response.status_code),
})
context['view'].resource_name = 'errors'
response.data = errors
return response
class Conflict(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = _('Conflict.')
|
from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
    """Render DRF error responses in JSON API format.

    Wraps rest_framework's default handler and reshapes ``response.data``
    into a list of error objects with ``detail``/``source``/``status``
    members, then flags the view so the renderer emits an 'errors' member.

    NOTE(review): assumes ``drf_exception_handler`` returned a response --
    for unhandled exception types it returns ``None``, which would crash
    below; confirm callers never hit that path.
    """
    response = drf_exception_handler(exc, context)
    errors = []
    # handle generic errors. ValidationError('test') in a view for example
    if isinstance(response.data, list):
        for message in response.data:
            errors.append({
                'detail': message,
                'source': {
                    'pointer': '/data',
                },
                'status': encoding.force_text(response.status_code),
            })
    # handle all errors thrown from serializers
    else:
        for field, error in response.data.items():
            # Convert the field name to the configured JSON API key format.
            field = format_value(field)
            pointer = '/data/attributes/{}'.format(field)
            # see if they passed a dictionary to ValidationError manually
            # or a string in case of AuthenticationError
            if isinstance(error, dict) or isinstance(error, str):
                errors.append(error)
            else:
                for message in error:
                    errors.append({
                        'detail': message,
                        'source': {
                            'pointer': pointer,
                        },
                        'status': encoding.force_text(response.status_code),
                    })
    # Make the renderer wrap the payload in a top-level 'errors' member.
    context['view'].resource_name = 'errors'
    response.data = errors
    return response
class Conflict(APIException):
    """HTTP 409 error for requests conflicting with current resource state."""
    status_code = status.HTTP_409_CONFLICT
    default_detail = _('Conflict.')
|
Modify exception_handler for a string error like AuthenticationError
|
Modify exception_handler for a string error like AuthenticationError
|
Python
|
bsd-2-clause
|
leo-naeka/django-rest-framework-json-api,Instawork/django-rest-framework-json-api,pombredanne/django-rest-framework-json-api,grapo/django-rest-framework-json-api,hnakamur/django-rest-framework-json-api,leo-naeka/rest_framework_ember,django-json-api/django-rest-framework-json-api,abdulhaq-e/django-rest-framework-json-api,martinmaillard/django-rest-framework-json-api,lukaslundgren/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,leifurhauks/django-rest-framework-json-api,schtibe/django-rest-framework-json-api,scottfisk/django-rest-framework-json-api,django-json-api/rest_framework_ember,kaldras/django-rest-framework-json-api
|
from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
    """Convert DRF error responses into a list of JSON API error objects.

    Each error carries ``detail``, a ``source``/``pointer`` locating the
    offending attribute, and the HTTP ``status`` as a string.
    """
    response = drf_exception_handler(exc, context)
    # DRF returns None for exception types it does not handle; propagate
    # that so Django's normal 500 handling applies instead of crashing here.
    if response is None:
        return None
    errors = []
    # handle generic errors. ValidationError('test') in a view for example
    if isinstance(response.data, list):
        for message in response.data:
            errors.append({
                'detail': message,
                'source': {
                    'pointer': '/data',
                },
                'status': encoding.force_text(response.status_code),
            })
    # handle all errors thrown from serializers
    else:
        for field, error in response.data.items():
            field = format_value(field)
            pointer = '/data/attributes/{}'.format(field)
            # Dicts (manual ValidationError payloads) and bare strings
            # (e.g. AuthenticationError details) pass through unchanged;
            # iterating a string here would emit one error per character.
            if isinstance(error, (dict, str)):
                errors.append(error)
            else:
                for message in error:
                    errors.append({
                        'detail': message,
                        'source': {
                            'pointer': pointer,
                        },
                        'status': encoding.force_text(response.status_code),
                    })
    context['view'].resource_name = 'errors'
    response.data = errors
    return response
class Conflict(APIException):
    """Raised for requests that conflict with current resource state (409)."""
    status_code = status.HTTP_409_CONFLICT
    default_detail = _('Conflict.')
Modify exception_handler for a string error like AuthenticationError
|
from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
response = drf_exception_handler(exc, context)
errors = []
# handle generic errors. ValidationError('test') in a view for example
if isinstance(response.data, list):
for message in response.data:
errors.append({
'detail': message,
'source': {
'pointer': '/data',
},
'status': encoding.force_text(response.status_code),
})
# handle all errors thrown from serializers
else:
for field, error in response.data.items():
field = format_value(field)
pointer = '/data/attributes/{}'.format(field)
# see if they passed a dictionary to ValidationError manually
# or a string in case of AuthenticationError
if isinstance(error, dict) or isinstance(error, str):
errors.append(error)
else:
for message in error:
errors.append({
'detail': message,
'source': {
'pointer': pointer,
},
'status': encoding.force_text(response.status_code),
})
context['view'].resource_name = 'errors'
response.data = errors
return response
class Conflict(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = _('Conflict.')
|
<commit_before>from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
response = drf_exception_handler(exc, context)
errors = []
# handle generic errors. ValidationError('test') in a view for example
if isinstance(response.data, list):
for message in response.data:
errors.append({
'detail': message,
'source': {
'pointer': '/data',
},
'status': encoding.force_text(response.status_code),
})
# handle all errors thrown from serializers
else:
for field, error in response.data.items():
field = format_value(field)
pointer = '/data/attributes/{}'.format(field)
# see if they passed a dictionary to ValidationError manually
if isinstance(error, dict):
errors.append(error)
else:
for message in error:
errors.append({
'detail': message,
'source': {
'pointer': pointer,
},
'status': encoding.force_text(response.status_code),
})
context['view'].resource_name = 'errors'
response.data = errors
return response
class Conflict(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = _('Conflict.')
<commit_msg>Modify exception_handler for a string error like AuthenticationError<commit_after>
|
from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
    """Reshape DRF error payloads into JSON API error objects."""
    response = drf_exception_handler(exc, context)
    status_text = encoding.force_text(response.status_code)

    def as_error(message, pointer):
        return {
            'detail': message,
            'source': {
                'pointer': pointer,
            },
            'status': status_text,
        }

    errors = []
    data = response.data
    # Generic errors, e.g. ValidationError('test') raised in a view.
    if isinstance(data, list):
        errors.extend(as_error(message, '/data') for message in data)
    else:
        # Field-keyed errors raised by serializers.
        for field, error in data.items():
            pointer = '/data/attributes/{}'.format(format_value(field))
            # Manual dict payloads and bare strings (AuthenticationError)
            # are forwarded untouched.
            if isinstance(error, (dict, str)):
                errors.append(error)
            else:
                errors.extend(as_error(message, pointer) for message in error)
    context['view'].resource_name = 'errors'
    response.data = errors
    return response
class Conflict(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = _('Conflict.')
|
from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
response = drf_exception_handler(exc, context)
errors = []
# handle generic errors. ValidationError('test') in a view for example
if isinstance(response.data, list):
for message in response.data:
errors.append({
'detail': message,
'source': {
'pointer': '/data',
},
'status': encoding.force_text(response.status_code),
})
# handle all errors thrown from serializers
else:
for field, error in response.data.items():
field = format_value(field)
pointer = '/data/attributes/{}'.format(field)
# see if they passed a dictionary to ValidationError manually
if isinstance(error, dict):
errors.append(error)
else:
for message in error:
errors.append({
'detail': message,
'source': {
'pointer': pointer,
},
'status': encoding.force_text(response.status_code),
})
context['view'].resource_name = 'errors'
response.data = errors
return response
class Conflict(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = _('Conflict.')
Modify exception_handler for a string error like AuthenticationErrorfrom django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
response = drf_exception_handler(exc, context)
errors = []
# handle generic errors. ValidationError('test') in a view for example
if isinstance(response.data, list):
for message in response.data:
errors.append({
'detail': message,
'source': {
'pointer': '/data',
},
'status': encoding.force_text(response.status_code),
})
# handle all errors thrown from serializers
else:
for field, error in response.data.items():
field = format_value(field)
pointer = '/data/attributes/{}'.format(field)
# see if they passed a dictionary to ValidationError manually
# or a string in case of AuthenticationError
if isinstance(error, dict) or isinstance(error, str):
errors.append(error)
else:
for message in error:
errors.append({
'detail': message,
'source': {
'pointer': pointer,
},
'status': encoding.force_text(response.status_code),
})
context['view'].resource_name = 'errors'
response.data = errors
return response
class Conflict(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = _('Conflict.')
|
<commit_before>from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
response = drf_exception_handler(exc, context)
errors = []
# handle generic errors. ValidationError('test') in a view for example
if isinstance(response.data, list):
for message in response.data:
errors.append({
'detail': message,
'source': {
'pointer': '/data',
},
'status': encoding.force_text(response.status_code),
})
# handle all errors thrown from serializers
else:
for field, error in response.data.items():
field = format_value(field)
pointer = '/data/attributes/{}'.format(field)
# see if they passed a dictionary to ValidationError manually
if isinstance(error, dict):
errors.append(error)
else:
for message in error:
errors.append({
'detail': message,
'source': {
'pointer': pointer,
},
'status': encoding.force_text(response.status_code),
})
context['view'].resource_name = 'errors'
response.data = errors
return response
class Conflict(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = _('Conflict.')
<commit_msg>Modify exception_handler for a string error like AuthenticationError<commit_after>from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from rest_framework import status
from rest_framework.exceptions import APIException
from rest_framework.views import exception_handler as drf_exception_handler
from rest_framework_json_api.utils import format_value
def exception_handler(exc, context):
response = drf_exception_handler(exc, context)
errors = []
# handle generic errors. ValidationError('test') in a view for example
if isinstance(response.data, list):
for message in response.data:
errors.append({
'detail': message,
'source': {
'pointer': '/data',
},
'status': encoding.force_text(response.status_code),
})
# handle all errors thrown from serializers
else:
for field, error in response.data.items():
field = format_value(field)
pointer = '/data/attributes/{}'.format(field)
# see if they passed a dictionary to ValidationError manually
# or a string in case of AuthenticationError
if isinstance(error, dict) or isinstance(error, str):
errors.append(error)
else:
for message in error:
errors.append({
'detail': message,
'source': {
'pointer': pointer,
},
'status': encoding.force_text(response.status_code),
})
context['view'].resource_name = 'errors'
response.data = errors
return response
class Conflict(APIException):
status_code = status.HTTP_409_CONFLICT
default_detail = _('Conflict.')
|
9569a37369e37dbb2a567423fa20a76948439e21
|
api/BucketListAPI.py
|
api/BucketListAPI.py
|
from flask import Flask, jsonify, request
from modals.modals import User
from api import create_app, db
from validate_email import validate_email
app = create_app('DevelopmentEnv')
@app.route('/')
def index():
    """Welcome endpoint returning a static greeting.

    NOTE(review): responds 201 (Created) to a plain GET; 200 looks
    intended -- confirm before changing, clients may rely on it.
    """
    response = jsonify({'Welcome Message': 'Hello'})
    response.status_code = 201
    return response
@app.route('/auth/register', methods=['POST'])
def register():
    """Register a new user from a JSON payload.

    Expects ``name``, ``email`` and ``password`` keys. Returns 400 for
    missing/invalid values or a duplicate e-mail, 201 on success, and
    500 when expected keys are absent from the payload.
    """
    request.get_json(force=True)
    try:
        name = request.json['name']
        email = request.json['email']
        password = request.json['password']
        if not name or not email or not password:
            response = jsonify({'Error': 'Missing Values'})
            response.status_code = 400
            return response
        if not validate_email(email):
            response = jsonify({'Error': 'Invalid Email'})
            response.status_code = 400
            return response
        if len(password) < 6:
            response = jsonify({'Error': 'Password is short'})
            response.status_code = 400
            return response
        user = User(email=email, password=password, name=name)
        # Uniqueness test without materialising every e-mail into a list.
        if any(r.email == email for r in user.get_all()):
            response = jsonify({'Error': 'Email Already exists'})
            response.status_code = 400
            return response
        user.save()
        response = jsonify({
            'Status': user.email + ' Successfully registered'
        })
        response.status_code = 201
        return response
    except KeyError:
        response = jsonify({'Error': 'Invalid Keys detected'})
        response.status_code = 500
        return response
if __name__ == '__main__':
    # Development server with the debugger enabled. The original also
    # called app.run(debug=True) afterwards, which only executed once the
    # first server stopped -- dead code, removed.
    app.debug = True
    app.run()
|
from flask import jsonify, request
from api import create_app
from classes.user import User
app = create_app('DevelopmentEnv')
@app.route('/')
def index():
response = jsonify({'Welcome Message': 'Hello'})
response.status_code = 201
return response
@app.route('/auth/register', methods=['POST'])
def register():
request.get_json(force=True)
try:
name = request.json['name']
email = request.json['email']
password = request.json['password']
user = User(email, password, name)
return user.register()
except KeyError:
response = jsonify({'Error': 'Invalid Keys detected'})
response.status_code = 500
return response
if __name__ == '__main__':
app.debug = True
app.run()
app.run(debug=True)
|
Move register code to User class
|
Move register code to User class
|
Python
|
mit
|
patlub/BucketListAPI,patlub/BucketListAPI
|
from flask import Flask, jsonify, request
from modals.modals import User
from api import create_app, db
from validate_email import validate_email
app = create_app('DevelopmentEnv')
@app.route('/')
def index():
response = jsonify({'Welcome Message': 'Hello'})
response.status_code = 201
return response
@app.route('/auth/register', methods=['POST'])
def register():
request.get_json(force=True)
try:
name = request.json['name']
email = request.json['email']
password = request.json['password']
if not name or not email or not password:
response = jsonify({'Error': 'Missing Values'})
response.status_code = 400
return response
if not validate_email(email):
response = jsonify({'Error': 'Invalid Email'})
response.status_code = 400
return response
if len(password) < 6:
response = jsonify({'Error': 'Password is short'})
response.status_code = 400
return response
user = User(email=email, password=password, name=name)
res = user.get_all()
if email in [r.email for r in res]:
response = jsonify({'Error': 'Email Already exists'})
response.status_code = 400
return response
user.save()
response = jsonify({
'Status': user.email + ' Successfully registered'
})
response.status_code = 201
return response
except KeyError:
response = jsonify({'Error': 'Invalid Keys detected'})
response.status_code = 500
return response
if __name__ == '__main__':
app.debug = True
app.run()
app.run(debug=True)
Move register code to User class
|
from flask import jsonify, request
from api import create_app
from classes.user import User
app = create_app('DevelopmentEnv')
@app.route('/')
def index():
response = jsonify({'Welcome Message': 'Hello'})
response.status_code = 201
return response
@app.route('/auth/register', methods=['POST'])
def register():
    """Create a user account from the posted JSON payload."""
    request.get_json(force=True)
    try:
        payload = request.json
        account = User(payload['email'], payload['password'], payload['name'])
        return account.register()
    except KeyError:
        error_response = jsonify({'Error': 'Invalid Keys detected'})
        error_response.status_code = 500
        return error_response
if __name__ == '__main__':
app.debug = True
app.run()
app.run(debug=True)
|
<commit_before>from flask import Flask, jsonify, request
from modals.modals import User
from api import create_app, db
from validate_email import validate_email
app = create_app('DevelopmentEnv')
@app.route('/')
def index():
response = jsonify({'Welcome Message': 'Hello'})
response.status_code = 201
return response
@app.route('/auth/register', methods=['POST'])
def register():
request.get_json(force=True)
try:
name = request.json['name']
email = request.json['email']
password = request.json['password']
if not name or not email or not password:
response = jsonify({'Error': 'Missing Values'})
response.status_code = 400
return response
if not validate_email(email):
response = jsonify({'Error': 'Invalid Email'})
response.status_code = 400
return response
if len(password) < 6:
response = jsonify({'Error': 'Password is short'})
response.status_code = 400
return response
user = User(email=email, password=password, name=name)
res = user.get_all()
if email in [r.email for r in res]:
response = jsonify({'Error': 'Email Already exists'})
response.status_code = 400
return response
user.save()
response = jsonify({
'Status': user.email + ' Successfully registered'
})
response.status_code = 201
return response
except KeyError:
response = jsonify({'Error': 'Invalid Keys detected'})
response.status_code = 500
return response
if __name__ == '__main__':
app.debug = True
app.run()
app.run(debug=True)
<commit_msg>Move register code to User class<commit_after>
|
from flask import jsonify, request
from api import create_app
from classes.user import User
app = create_app('DevelopmentEnv')
@app.route('/')
def index():
response = jsonify({'Welcome Message': 'Hello'})
response.status_code = 201
return response
@app.route('/auth/register', methods=['POST'])
def register():
request.get_json(force=True)
try:
name = request.json['name']
email = request.json['email']
password = request.json['password']
user = User(email, password, name)
return user.register()
except KeyError:
response = jsonify({'Error': 'Invalid Keys detected'})
response.status_code = 500
return response
if __name__ == '__main__':
app.debug = True
app.run()
app.run(debug=True)
|
from flask import Flask, jsonify, request
from modals.modals import User
from api import create_app, db
from validate_email import validate_email
app = create_app('DevelopmentEnv')
@app.route('/')
def index():
response = jsonify({'Welcome Message': 'Hello'})
response.status_code = 201
return response
@app.route('/auth/register', methods=['POST'])
def register():
request.get_json(force=True)
try:
name = request.json['name']
email = request.json['email']
password = request.json['password']
if not name or not email or not password:
response = jsonify({'Error': 'Missing Values'})
response.status_code = 400
return response
if not validate_email(email):
response = jsonify({'Error': 'Invalid Email'})
response.status_code = 400
return response
if len(password) < 6:
response = jsonify({'Error': 'Password is short'})
response.status_code = 400
return response
user = User(email=email, password=password, name=name)
res = user.get_all()
if email in [r.email for r in res]:
response = jsonify({'Error': 'Email Already exists'})
response.status_code = 400
return response
user.save()
response = jsonify({
'Status': user.email + ' Successfully registered'
})
response.status_code = 201
return response
except KeyError:
response = jsonify({'Error': 'Invalid Keys detected'})
response.status_code = 500
return response
if __name__ == '__main__':
app.debug = True
app.run()
app.run(debug=True)
Move register code to User classfrom flask import jsonify, request
from api import create_app
from classes.user import User
app = create_app('DevelopmentEnv')
@app.route('/')
def index():
response = jsonify({'Welcome Message': 'Hello'})
response.status_code = 201
return response
@app.route('/auth/register', methods=['POST'])
def register():
request.get_json(force=True)
try:
name = request.json['name']
email = request.json['email']
password = request.json['password']
user = User(email, password, name)
return user.register()
except KeyError:
response = jsonify({'Error': 'Invalid Keys detected'})
response.status_code = 500
return response
if __name__ == '__main__':
app.debug = True
app.run()
app.run(debug=True)
|
<commit_before>from flask import Flask, jsonify, request
from modals.modals import User
from api import create_app, db
from validate_email import validate_email
app = create_app('DevelopmentEnv')
@app.route('/')
def index():
response = jsonify({'Welcome Message': 'Hello'})
response.status_code = 201
return response
@app.route('/auth/register', methods=['POST'])
def register():
request.get_json(force=True)
try:
name = request.json['name']
email = request.json['email']
password = request.json['password']
if not name or not email or not password:
response = jsonify({'Error': 'Missing Values'})
response.status_code = 400
return response
if not validate_email(email):
response = jsonify({'Error': 'Invalid Email'})
response.status_code = 400
return response
if len(password) < 6:
response = jsonify({'Error': 'Password is short'})
response.status_code = 400
return response
user = User(email=email, password=password, name=name)
res = user.get_all()
if email in [r.email for r in res]:
response = jsonify({'Error': 'Email Already exists'})
response.status_code = 400
return response
user.save()
response = jsonify({
'Status': user.email + ' Successfully registered'
})
response.status_code = 201
return response
except KeyError:
response = jsonify({'Error': 'Invalid Keys detected'})
response.status_code = 500
return response
if __name__ == '__main__':
app.debug = True
app.run()
app.run(debug=True)
<commit_msg>Move register code to User class<commit_after>from flask import jsonify, request
from api import create_app
from classes.user import User
app = create_app('DevelopmentEnv')
@app.route('/')
def index():
response = jsonify({'Welcome Message': 'Hello'})
response.status_code = 201
return response
@app.route('/auth/register', methods=['POST'])
def register():
request.get_json(force=True)
try:
name = request.json['name']
email = request.json['email']
password = request.json['password']
user = User(email, password, name)
return user.register()
except KeyError:
response = jsonify({'Error': 'Invalid Keys detected'})
response.status_code = 500
return response
if __name__ == '__main__':
app.debug = True
app.run()
app.run(debug=True)
|
3ad37c4acfb1d34978941cea2663cb31f1460503
|
spotify.py
|
spotify.py
|
from willie import web
from willie import module
import time
import json
import urllib
@module.rule('.*(play.spotify.com\/track\/)([\w-]+).*')
def spotify(bot, trigger, found_match=None):
    """Announce artist, title, album and duration for a linked track.

    Fetches the track from the public Spotify API and replies in channel;
    falls back to a not-found message on any malformed response.
    """
    match = found_match or trigger
    resp = web.get('https://api.spotify.com/v1/tracks/%s' % match.group(2))
    try:
        # Parsed inside the handler: a non-JSON error page raises
        # ValueError, and an empty 'artists' list raises IndexError --
        # both previously escaped the KeyError-only handler.
        result = json.loads(resp)
        artist = result['artists'][0]['name']
        title = result['name']
        album = result['album']['name']
        duration = result['duration_ms']
        # Render milliseconds as H:M:S (fine for track-length durations).
        duration_hms = time.strftime('%H:%M:%S', time.gmtime(duration / 1000))
        bot.say('{0} - {1} [{2}] | {3}'.format(artist, title, album, duration_hms))
    except (KeyError, IndexError, ValueError):
        bot.say('Track not found.')
|
from willie import web
from willie import module
import time
import json
import re
regex = re.compile('(play.spotify.com\/track\/)([\w-]+)')
def setup(bot):
    # Register our regex in the bot's url_callbacks table so the generic
    # URL module skips links this module will answer itself.
    if not bot.memory.contains('url_callbacks'):
        bot.memory['url_callbacks'] = tools.WillieMemory()  # NOTE(review): `tools` is never imported in this module -- needs `from willie import tools`; verify
    bot.memory['url_callbacks'][regex] = spotify
def shutdown(bot):
    # Unregister our callback on module unload so stale handlers are not kept.
    del bot.memory['url_callbacks'][regex]
@module.rule('.*(play.spotify.com\/track\/)([\w-]+).*')
def spotify(bot, trigger, found_match=None):
match = found_match or trigger
resp = web.get('https://api.spotify.com/v1/tracks/%s' % match.group(2))
result = json.loads(resp)
try:
artist = result['artists'][0]['name']
title = result['name']
album = result['album']['name']
duration = result['duration_ms']
duration_hms = time.strftime('%H:%M:%S', time.gmtime(duration / 1000))
bot.say('{0} - {1} [{2}] | {3}'.format(artist, title, album, duration_hms))
except KeyError:
bot.say('Track not found.')
|
Add callback to url_callbacks so url module doesn't query it
|
Add callback to url_callbacks so url module doesn't query it
|
Python
|
mit
|
Metastruct/hal1320
|
from willie import web
from willie import module
import time
import json
import urllib
@module.rule('.*(play.spotify.com\/track\/)([\w-]+).*')
def spotify(bot, trigger, found_match=None):
match = found_match or trigger
resp = web.get('https://api.spotify.com/v1/tracks/%s' % match.group(2))
result = json.loads(resp)
try:
artist = result['artists'][0]['name']
title = result['name']
album = result['album']['name']
duration = result['duration_ms']
duration_hms = time.strftime('%H:%M:%S', time.gmtime(duration / 1000))
bot.say('{0} - {1} [{2}] | {3}'.format(artist, title, album, duration_hms))
except KeyError:
bot.say('Track not found.')Add callback to url_callbacks so url module doesn't query it
|
from willie import web
from willie import module
import time
import json
import re
regex = re.compile('(play.spotify.com\/track\/)([\w-]+)')
def setup(bot):
if not bot.memory.contains('url_callbacks'):
bot.memory['url_callbacks'] = tools.WillieMemory()
bot.memory['url_callbacks'][regex] = spotify
def shutdown(bot):
del bot.memory['url_callbacks'][regex]
@module.rule('.*(play.spotify.com\/track\/)([\w-]+).*')
def spotify(bot, trigger, found_match=None):
match = found_match or trigger
resp = web.get('https://api.spotify.com/v1/tracks/%s' % match.group(2))
result = json.loads(resp)
try:
artist = result['artists'][0]['name']
title = result['name']
album = result['album']['name']
duration = result['duration_ms']
duration_hms = time.strftime('%H:%M:%S', time.gmtime(duration / 1000))
bot.say('{0} - {1} [{2}] | {3}'.format(artist, title, album, duration_hms))
except KeyError:
bot.say('Track not found.')
|
<commit_before>from willie import web
from willie import module
import time
import json
import urllib
@module.rule('.*(play.spotify.com\/track\/)([\w-]+).*')
def spotify(bot, trigger, found_match=None):
match = found_match or trigger
resp = web.get('https://api.spotify.com/v1/tracks/%s' % match.group(2))
result = json.loads(resp)
try:
artist = result['artists'][0]['name']
title = result['name']
album = result['album']['name']
duration = result['duration_ms']
duration_hms = time.strftime('%H:%M:%S', time.gmtime(duration / 1000))
bot.say('{0} - {1} [{2}] | {3}'.format(artist, title, album, duration_hms))
except KeyError:
bot.say('Track not found.')<commit_msg>Add callback to url_callbacks so url module doesn't query it<commit_after>
|
from willie import web
from willie import module
import time
import json
import re
regex = re.compile('(play.spotify.com\/track\/)([\w-]+)')
def setup(bot):
if not bot.memory.contains('url_callbacks'):
bot.memory['url_callbacks'] = tools.WillieMemory()
bot.memory['url_callbacks'][regex] = spotify
def shutdown(bot):
del bot.memory['url_callbacks'][regex]
@module.rule('.*(play.spotify.com\/track\/)([\w-]+).*')
def spotify(bot, trigger, found_match=None):
match = found_match or trigger
resp = web.get('https://api.spotify.com/v1/tracks/%s' % match.group(2))
result = json.loads(resp)
try:
artist = result['artists'][0]['name']
title = result['name']
album = result['album']['name']
duration = result['duration_ms']
duration_hms = time.strftime('%H:%M:%S', time.gmtime(duration / 1000))
bot.say('{0} - {1} [{2}] | {3}'.format(artist, title, album, duration_hms))
except KeyError:
bot.say('Track not found.')
|
from willie import web
from willie import module
import time
import json
import urllib
@module.rule('.*(play.spotify.com\/track\/)([\w-]+).*')
def spotify(bot, trigger, found_match=None):
match = found_match or trigger
resp = web.get('https://api.spotify.com/v1/tracks/%s' % match.group(2))
result = json.loads(resp)
try:
artist = result['artists'][0]['name']
title = result['name']
album = result['album']['name']
duration = result['duration_ms']
duration_hms = time.strftime('%H:%M:%S', time.gmtime(duration / 1000))
bot.say('{0} - {1} [{2}] | {3}'.format(artist, title, album, duration_hms))
except KeyError:
bot.say('Track not found.')Add callback to url_callbacks so url module doesn't query itfrom willie import web
from willie import module
import time
import json
import re
regex = re.compile('(play.spotify.com\/track\/)([\w-]+)')
def setup(bot):
if not bot.memory.contains('url_callbacks'):
bot.memory['url_callbacks'] = tools.WillieMemory()
bot.memory['url_callbacks'][regex] = spotify
def shutdown(bot):
del bot.memory['url_callbacks'][regex]
@module.rule('.*(play.spotify.com\/track\/)([\w-]+).*')
def spotify(bot, trigger, found_match=None):
match = found_match or trigger
resp = web.get('https://api.spotify.com/v1/tracks/%s' % match.group(2))
result = json.loads(resp)
try:
artist = result['artists'][0]['name']
title = result['name']
album = result['album']['name']
duration = result['duration_ms']
duration_hms = time.strftime('%H:%M:%S', time.gmtime(duration / 1000))
bot.say('{0} - {1} [{2}] | {3}'.format(artist, title, album, duration_hms))
except KeyError:
bot.say('Track not found.')
|
<commit_before>from willie import web
from willie import module
import time
import json
import urllib
@module.rule('.*(play.spotify.com\/track\/)([\w-]+).*')
def spotify(bot, trigger, found_match=None):
match = found_match or trigger
resp = web.get('https://api.spotify.com/v1/tracks/%s' % match.group(2))
result = json.loads(resp)
try:
artist = result['artists'][0]['name']
title = result['name']
album = result['album']['name']
duration = result['duration_ms']
duration_hms = time.strftime('%H:%M:%S', time.gmtime(duration / 1000))
bot.say('{0} - {1} [{2}] | {3}'.format(artist, title, album, duration_hms))
except KeyError:
bot.say('Track not found.')<commit_msg>Add callback to url_callbacks so url module doesn't query it<commit_after>from willie import web
from willie import module
import time
import json
import re
regex = re.compile('(play.spotify.com\/track\/)([\w-]+)')
def setup(bot):
if not bot.memory.contains('url_callbacks'):
bot.memory['url_callbacks'] = tools.WillieMemory()
bot.memory['url_callbacks'][regex] = spotify
def shutdown(bot):
del bot.memory['url_callbacks'][regex]
@module.rule('.*(play.spotify.com\/track\/)([\w-]+).*')
def spotify(bot, trigger, found_match=None):
match = found_match or trigger
resp = web.get('https://api.spotify.com/v1/tracks/%s' % match.group(2))
result = json.loads(resp)
try:
artist = result['artists'][0]['name']
title = result['name']
album = result['album']['name']
duration = result['duration_ms']
duration_hms = time.strftime('%H:%M:%S', time.gmtime(duration / 1000))
bot.say('{0} - {1} [{2}] | {3}'.format(artist, title, album, duration_hms))
except KeyError:
bot.say('Track not found.')
|
ff4204659b158827070fbb4bdf9bfc1f263e8b33
|
fanstatic/registry.py
|
fanstatic/registry.py
|
import pkg_resources
ENTRY_POINT = 'fanstatic.libraries'
class LibraryRegistry(dict):
"""A dictionary-like registry of libraries.
This is a dictionary that mains libraries. A value is
a :py:class:`Library` instance, and a key is its
library ``name``.
Normally there is only a single global LibraryRegistry,
obtained by calling ``get_library_registry()``.
:param libraries: a sequence of libraries
"""
def __init__(self, libraries):
if libraries is None:
return
for library in libraries:
self[library.name] = library
def add(self, library):
"""Add a Library instance to the registry.
:param add: add a library to the registry.
"""
self[library.name] = library
def get_libraries_from_entry_points():
libraries = []
for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT):
libraries.append(entry_point.load())
return libraries
_library_registry = None
def get_library_registry():
'''Get the global :py:class:`LibraryRegistry`.
It gets filled with the libraries registered using the fanstatic
entry point.
You can also add libraries to it later.
'''
global _library_registry
if _library_registry is not None:
return _library_registry
_library_registry = LibraryRegistry(get_libraries_from_entry_points())
return _library_registry
|
import pkg_resources
ENTRY_POINT = 'fanstatic.libraries'
class LibraryRegistry(dict):
    """A dictionary-like registry mapping library names to libraries.

    Keys are the ``name`` attributes of :py:class:`Library` instances;
    values are the instances themselves.  Normally a single global
    registry exists, obtained via ``get_library_registry()``.

    :param libraries: an iterable of libraries to register up front
    """
    def __init__(self, libraries):
        self.update((lib.name, lib) for lib in libraries)

    def add(self, library):
        """Register *library* in this registry under its own ``name``.

        :param library: the library to add to the registry.
        """
        self[library.name] = library
def get_libraries_from_entry_points():
    """Load and return every library advertised under the fanstatic entry point."""
    return [entry_point.load()
            for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT)]
_library_registry = None
def get_library_registry():
    '''Get the global :py:class:`LibraryRegistry`.

    On first use it is filled with the libraries registered via the
    fanstatic entry point; further libraries can be added later.
    '''
    global _library_registry
    if _library_registry is None:
        # Lazily build the singleton on first access.
        _library_registry = LibraryRegistry(get_libraries_from_entry_points())
    return _library_registry
|
Remove some code that wasn't in use anymore.
|
Remove some code that wasn't in use anymore.
|
Python
|
bsd-3-clause
|
fanstatic/fanstatic,fanstatic/fanstatic
|
import pkg_resources
ENTRY_POINT = 'fanstatic.libraries'
class LibraryRegistry(dict):
"""A dictionary-like registry of libraries.
This is a dictionary that mains libraries. A value is
a :py:class:`Library` instance, and a key is its
library ``name``.
Normally there is only a single global LibraryRegistry,
obtained by calling ``get_library_registry()``.
:param libraries: a sequence of libraries
"""
def __init__(self, libraries):
if libraries is None:
return
for library in libraries:
self[library.name] = library
def add(self, library):
"""Add a Library instance to the registry.
:param add: add a library to the registry.
"""
self[library.name] = library
def get_libraries_from_entry_points():
libraries = []
for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT):
libraries.append(entry_point.load())
return libraries
_library_registry = None
def get_library_registry():
'''Get the global :py:class:`LibraryRegistry`.
It gets filled with the libraries registered using the fanstatic
entry point.
You can also add libraries to it later.
'''
global _library_registry
if _library_registry is not None:
return _library_registry
_library_registry = LibraryRegistry(get_libraries_from_entry_points())
return _library_registry
Remove some code that wasn't in use anymore.
|
import pkg_resources
ENTRY_POINT = 'fanstatic.libraries'
class LibraryRegistry(dict):
"""A dictionary-like registry of libraries.
    This is a dictionary that maintains libraries. A value is
a :py:class:`Library` instance, and a key is its
library ``name``.
Normally there is only a single global LibraryRegistry,
obtained by calling ``get_library_registry()``.
:param libraries: a sequence of libraries
"""
def __init__(self, libraries):
for library in libraries:
self[library.name] = library
def add(self, library):
"""Add a Library instance to the registry.
:param add: add a library to the registry.
"""
self[library.name] = library
def get_libraries_from_entry_points():
libraries = []
for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT):
libraries.append(entry_point.load())
return libraries
_library_registry = None
def get_library_registry():
'''Get the global :py:class:`LibraryRegistry`.
It gets filled with the libraries registered using the fanstatic
entry point.
You can also add libraries to it later.
'''
global _library_registry
if _library_registry is not None:
return _library_registry
_library_registry = LibraryRegistry(get_libraries_from_entry_points())
return _library_registry
|
<commit_before>import pkg_resources
ENTRY_POINT = 'fanstatic.libraries'
class LibraryRegistry(dict):
"""A dictionary-like registry of libraries.
This is a dictionary that mains libraries. A value is
a :py:class:`Library` instance, and a key is its
library ``name``.
Normally there is only a single global LibraryRegistry,
obtained by calling ``get_library_registry()``.
:param libraries: a sequence of libraries
"""
def __init__(self, libraries):
if libraries is None:
return
for library in libraries:
self[library.name] = library
def add(self, library):
"""Add a Library instance to the registry.
:param add: add a library to the registry.
"""
self[library.name] = library
def get_libraries_from_entry_points():
libraries = []
for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT):
libraries.append(entry_point.load())
return libraries
_library_registry = None
def get_library_registry():
'''Get the global :py:class:`LibraryRegistry`.
It gets filled with the libraries registered using the fanstatic
entry point.
You can also add libraries to it later.
'''
global _library_registry
if _library_registry is not None:
return _library_registry
_library_registry = LibraryRegistry(get_libraries_from_entry_points())
return _library_registry
<commit_msg>Remove some code that wasn't in use anymore.<commit_after>
|
import pkg_resources
ENTRY_POINT = 'fanstatic.libraries'
class LibraryRegistry(dict):
"""A dictionary-like registry of libraries.
This is a dictionary that mains libraries. A value is
a :py:class:`Library` instance, and a key is its
library ``name``.
Normally there is only a single global LibraryRegistry,
obtained by calling ``get_library_registry()``.
:param libraries: a sequence of libraries
"""
def __init__(self, libraries):
for library in libraries:
self[library.name] = library
def add(self, library):
"""Add a Library instance to the registry.
:param add: add a library to the registry.
"""
self[library.name] = library
def get_libraries_from_entry_points():
libraries = []
for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT):
libraries.append(entry_point.load())
return libraries
_library_registry = None
def get_library_registry():
'''Get the global :py:class:`LibraryRegistry`.
It gets filled with the libraries registered using the fanstatic
entry point.
You can also add libraries to it later.
'''
global _library_registry
if _library_registry is not None:
return _library_registry
_library_registry = LibraryRegistry(get_libraries_from_entry_points())
return _library_registry
|
import pkg_resources
ENTRY_POINT = 'fanstatic.libraries'
class LibraryRegistry(dict):
"""A dictionary-like registry of libraries.
This is a dictionary that mains libraries. A value is
a :py:class:`Library` instance, and a key is its
library ``name``.
Normally there is only a single global LibraryRegistry,
obtained by calling ``get_library_registry()``.
:param libraries: a sequence of libraries
"""
def __init__(self, libraries):
if libraries is None:
return
for library in libraries:
self[library.name] = library
def add(self, library):
"""Add a Library instance to the registry.
:param add: add a library to the registry.
"""
self[library.name] = library
def get_libraries_from_entry_points():
libraries = []
for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT):
libraries.append(entry_point.load())
return libraries
_library_registry = None
def get_library_registry():
'''Get the global :py:class:`LibraryRegistry`.
It gets filled with the libraries registered using the fanstatic
entry point.
You can also add libraries to it later.
'''
global _library_registry
if _library_registry is not None:
return _library_registry
_library_registry = LibraryRegistry(get_libraries_from_entry_points())
return _library_registry
Remove some code that wasn't in use anymore.import pkg_resources
ENTRY_POINT = 'fanstatic.libraries'
class LibraryRegistry(dict):
"""A dictionary-like registry of libraries.
This is a dictionary that mains libraries. A value is
a :py:class:`Library` instance, and a key is its
library ``name``.
Normally there is only a single global LibraryRegistry,
obtained by calling ``get_library_registry()``.
:param libraries: a sequence of libraries
"""
def __init__(self, libraries):
for library in libraries:
self[library.name] = library
def add(self, library):
"""Add a Library instance to the registry.
:param add: add a library to the registry.
"""
self[library.name] = library
def get_libraries_from_entry_points():
libraries = []
for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT):
libraries.append(entry_point.load())
return libraries
_library_registry = None
def get_library_registry():
'''Get the global :py:class:`LibraryRegistry`.
It gets filled with the libraries registered using the fanstatic
entry point.
You can also add libraries to it later.
'''
global _library_registry
if _library_registry is not None:
return _library_registry
_library_registry = LibraryRegistry(get_libraries_from_entry_points())
return _library_registry
|
<commit_before>import pkg_resources
ENTRY_POINT = 'fanstatic.libraries'
class LibraryRegistry(dict):
"""A dictionary-like registry of libraries.
This is a dictionary that mains libraries. A value is
a :py:class:`Library` instance, and a key is its
library ``name``.
Normally there is only a single global LibraryRegistry,
obtained by calling ``get_library_registry()``.
:param libraries: a sequence of libraries
"""
def __init__(self, libraries):
if libraries is None:
return
for library in libraries:
self[library.name] = library
def add(self, library):
"""Add a Library instance to the registry.
:param add: add a library to the registry.
"""
self[library.name] = library
def get_libraries_from_entry_points():
libraries = []
for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT):
libraries.append(entry_point.load())
return libraries
_library_registry = None
def get_library_registry():
'''Get the global :py:class:`LibraryRegistry`.
It gets filled with the libraries registered using the fanstatic
entry point.
You can also add libraries to it later.
'''
global _library_registry
if _library_registry is not None:
return _library_registry
_library_registry = LibraryRegistry(get_libraries_from_entry_points())
return _library_registry
<commit_msg>Remove some code that wasn't in use anymore.<commit_after>import pkg_resources
ENTRY_POINT = 'fanstatic.libraries'
class LibraryRegistry(dict):
"""A dictionary-like registry of libraries.
This is a dictionary that mains libraries. A value is
a :py:class:`Library` instance, and a key is its
library ``name``.
Normally there is only a single global LibraryRegistry,
obtained by calling ``get_library_registry()``.
:param libraries: a sequence of libraries
"""
def __init__(self, libraries):
for library in libraries:
self[library.name] = library
def add(self, library):
"""Add a Library instance to the registry.
:param add: add a library to the registry.
"""
self[library.name] = library
def get_libraries_from_entry_points():
libraries = []
for entry_point in pkg_resources.iter_entry_points(ENTRY_POINT):
libraries.append(entry_point.load())
return libraries
_library_registry = None
def get_library_registry():
'''Get the global :py:class:`LibraryRegistry`.
It gets filled with the libraries registered using the fanstatic
entry point.
You can also add libraries to it later.
'''
global _library_registry
if _library_registry is not None:
return _library_registry
_library_registry = LibraryRegistry(get_libraries_from_entry_points())
return _library_registry
|
45cfd59f5bc8e91a88e54fda83f868f7bb3c4884
|
examples/wsgi_app.py
|
examples/wsgi_app.py
|
import guv
guv.monkey_patch()
import json
import bottle
import guv.wsgi
import logger
logger.configure()
app = bottle.Bottle()
@app.route('/')
def index():
data = json.dumps({'status': True})
return data
if __name__ == '__main__':
server_sock = guv.listen(('0.0.0.0', 8001))
guv.wsgi.serve(server_sock, app)
|
import guv
guv.monkey_patch()
import guv.wsgi
import logger
logger.configure()
def app(environ, start_response):
    """Minimal WSGI application that always returns a fixed plain-text body."""
    body = [b'Hello World!']
    # Content-Length is the byte length of the full response body.
    headers = [('Content-type', 'text/plain'),
               ('Content-Length', str(sum(len(chunk) for chunk in body)))]
    start_response('200 OK', headers)
    return body
if __name__ == '__main__':
server_sock = guv.listen(('0.0.0.0', 8001))
guv.wsgi.serve(server_sock, app)
|
Use bare WSGI application for testing and benchmarking
|
Use bare WSGI application for testing and benchmarking
|
Python
|
mit
|
veegee/guv,veegee/guv
|
import guv
guv.monkey_patch()
import json
import bottle
import guv.wsgi
import logger
logger.configure()
app = bottle.Bottle()
@app.route('/')
def index():
data = json.dumps({'status': True})
return data
if __name__ == '__main__':
server_sock = guv.listen(('0.0.0.0', 8001))
guv.wsgi.serve(server_sock, app)
Use bare WSGI application for testing and benchmarking
|
import guv
guv.monkey_patch()
import guv.wsgi
import logger
logger.configure()
def app(environ, start_response):
status = '200 OK'
output = [b'Hello World!']
content_length = str(len(b''.join(output)))
response_headers = [('Content-type', 'text/plain'),
('Content-Length', content_length)]
start_response(status, response_headers)
return output
if __name__ == '__main__':
server_sock = guv.listen(('0.0.0.0', 8001))
guv.wsgi.serve(server_sock, app)
|
<commit_before>import guv
guv.monkey_patch()
import json
import bottle
import guv.wsgi
import logger
logger.configure()
app = bottle.Bottle()
@app.route('/')
def index():
data = json.dumps({'status': True})
return data
if __name__ == '__main__':
server_sock = guv.listen(('0.0.0.0', 8001))
guv.wsgi.serve(server_sock, app)
<commit_msg>Use bare WSGI application for testing and benchmarking<commit_after>
|
import guv
guv.monkey_patch()
import guv.wsgi
import logger
logger.configure()
def app(environ, start_response):
status = '200 OK'
output = [b'Hello World!']
content_length = str(len(b''.join(output)))
response_headers = [('Content-type', 'text/plain'),
('Content-Length', content_length)]
start_response(status, response_headers)
return output
if __name__ == '__main__':
server_sock = guv.listen(('0.0.0.0', 8001))
guv.wsgi.serve(server_sock, app)
|
import guv
guv.monkey_patch()
import json
import bottle
import guv.wsgi
import logger
logger.configure()
app = bottle.Bottle()
@app.route('/')
def index():
data = json.dumps({'status': True})
return data
if __name__ == '__main__':
server_sock = guv.listen(('0.0.0.0', 8001))
guv.wsgi.serve(server_sock, app)
Use bare WSGI application for testing and benchmarkingimport guv
guv.monkey_patch()
import guv.wsgi
import logger
logger.configure()
def app(environ, start_response):
status = '200 OK'
output = [b'Hello World!']
content_length = str(len(b''.join(output)))
response_headers = [('Content-type', 'text/plain'),
('Content-Length', content_length)]
start_response(status, response_headers)
return output
if __name__ == '__main__':
server_sock = guv.listen(('0.0.0.0', 8001))
guv.wsgi.serve(server_sock, app)
|
<commit_before>import guv
guv.monkey_patch()
import json
import bottle
import guv.wsgi
import logger
logger.configure()
app = bottle.Bottle()
@app.route('/')
def index():
data = json.dumps({'status': True})
return data
if __name__ == '__main__':
server_sock = guv.listen(('0.0.0.0', 8001))
guv.wsgi.serve(server_sock, app)
<commit_msg>Use bare WSGI application for testing and benchmarking<commit_after>import guv
guv.monkey_patch()
import guv.wsgi
import logger
logger.configure()
def app(environ, start_response):
status = '200 OK'
output = [b'Hello World!']
content_length = str(len(b''.join(output)))
response_headers = [('Content-type', 'text/plain'),
('Content-Length', content_length)]
start_response(status, response_headers)
return output
if __name__ == '__main__':
server_sock = guv.listen(('0.0.0.0', 8001))
guv.wsgi.serve(server_sock, app)
|
3f747610f080879774720aa1efe38f40364ea151
|
raco/myrial/type_tests.py
|
raco/myrial/type_tests.py
|
"""Various tests of type safety."""
import unittest
from raco.fakedb import FakeDatabase
from raco.scheme import Scheme
from raco.myrial.myrial_test import MyrialTestCase
from collections import Counter
class TypeTests(MyrialTestCase):
schema = Scheme(
[("clong", "LONG_TYPE"),
("cint", "INT_TYPE"),
("cstring", "STRING_TYPE"),
("cfloat", "DOUBLE_TYPE")])
def setUp(self):
super(TypeTests, self).setUp()
self.db.ingest("public:adhoc:mytable", Counter(), TypeTests.schema)
def noop_test(self):
query = """
X = SCAN(public:adhoc:mytable);
STORE(X, OUTPUT);
"""
self.check_scheme(query, TypeTests.schema)
|
"""Various tests of type safety."""
import unittest
from raco.fakedb import FakeDatabase
from raco.scheme import Scheme
from raco.myrial.myrial_test import MyrialTestCase
from raco.expression import TypeSafetyViolation
from collections import Counter
class TypeTests(MyrialTestCase):
schema = Scheme(
[("clong", "LONG_TYPE"),
("cint", "INT_TYPE"),
("cstring", "STRING_TYPE"),
("cfloat", "DOUBLE_TYPE")])
def setUp(self):
super(TypeTests, self).setUp()
self.db.ingest("public:adhoc:mytable", Counter(), TypeTests.schema)
def noop_test(self):
query = """
X = SCAN(public:adhoc:mytable);
STORE(X, OUTPUT);
"""
self.check_scheme(query, TypeTests.schema)
def invalid_eq(self):
query = """
X = [FROM SCAN(public:adhoc:mytable) AS X EMIT clong=cstring];
STORE(X, OUTPUT);
"""
with self.assertRaises(TypeSafetyViolation):
self.check_scheme(query, None)
|
Test of invalid equality test
|
Test of invalid equality test
|
Python
|
bsd-3-clause
|
uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco
|
"""Various tests of type safety."""
import unittest
from raco.fakedb import FakeDatabase
from raco.scheme import Scheme
from raco.myrial.myrial_test import MyrialTestCase
from collections import Counter
class TypeTests(MyrialTestCase):
schema = Scheme(
[("clong", "LONG_TYPE"),
("cint", "INT_TYPE"),
("cstring", "STRING_TYPE"),
("cfloat", "DOUBLE_TYPE")])
def setUp(self):
super(TypeTests, self).setUp()
self.db.ingest("public:adhoc:mytable", Counter(), TypeTests.schema)
def noop_test(self):
query = """
X = SCAN(public:adhoc:mytable);
STORE(X, OUTPUT);
"""
self.check_scheme(query, TypeTests.schema)
Test of invalid equality test
|
"""Various tests of type safety."""
import unittest
from raco.fakedb import FakeDatabase
from raco.scheme import Scheme
from raco.myrial.myrial_test import MyrialTestCase
from raco.expression import TypeSafetyViolation
from collections import Counter
class TypeTests(MyrialTestCase):
    # Shared input schema with one column of each primitive type under test.
    schema = Scheme(
        [("clong", "LONG_TYPE"),
         ("cint", "INT_TYPE"),
         ("cstring", "STRING_TYPE"),
         ("cfloat", "DOUBLE_TYPE")])
    def setUp(self):
        # Ingest an empty relation with the test schema into the fake database.
        super(TypeTests, self).setUp()
        self.db.ingest("public:adhoc:mytable", Counter(), TypeTests.schema)
    def noop_test(self):
        # A pass-through scan must preserve the ingested schema exactly.
        # NOTE(review): name does not match unittest's ``test_*`` pattern,
        # so default discovery may skip it -- confirm the runner convention.
        query = """
        X = SCAN(public:adhoc:mytable);
        STORE(X, OUTPUT);
        """
        self.check_scheme(query, TypeTests.schema)
    def invalid_eq(self):
        # Equality between a LONG column and a STRING column must be rejected.
        query = """
        X = [FROM SCAN(public:adhoc:mytable) AS X EMIT clong=cstring];
        STORE(X, OUTPUT);
        """
        with self.assertRaises(TypeSafetyViolation):
            self.check_scheme(query, None)
|
<commit_before>"""Various tests of type safety."""
import unittest
from raco.fakedb import FakeDatabase
from raco.scheme import Scheme
from raco.myrial.myrial_test import MyrialTestCase
from collections import Counter
class TypeTests(MyrialTestCase):
schema = Scheme(
[("clong", "LONG_TYPE"),
("cint", "INT_TYPE"),
("cstring", "STRING_TYPE"),
("cfloat", "DOUBLE_TYPE")])
def setUp(self):
super(TypeTests, self).setUp()
self.db.ingest("public:adhoc:mytable", Counter(), TypeTests.schema)
def noop_test(self):
query = """
X = SCAN(public:adhoc:mytable);
STORE(X, OUTPUT);
"""
self.check_scheme(query, TypeTests.schema)
<commit_msg>Test of invalid equality test<commit_after>
|
"""Various tests of type safety."""
import unittest
from raco.fakedb import FakeDatabase
from raco.scheme import Scheme
from raco.myrial.myrial_test import MyrialTestCase
from raco.expression import TypeSafetyViolation
from collections import Counter
class TypeTests(MyrialTestCase):
schema = Scheme(
[("clong", "LONG_TYPE"),
("cint", "INT_TYPE"),
("cstring", "STRING_TYPE"),
("cfloat", "DOUBLE_TYPE")])
def setUp(self):
super(TypeTests, self).setUp()
self.db.ingest("public:adhoc:mytable", Counter(), TypeTests.schema)
def noop_test(self):
query = """
X = SCAN(public:adhoc:mytable);
STORE(X, OUTPUT);
"""
self.check_scheme(query, TypeTests.schema)
def invalid_eq(self):
query = """
X = [FROM SCAN(public:adhoc:mytable) AS X EMIT clong=cstring];
STORE(X, OUTPUT);
"""
with self.assertRaises(TypeSafetyViolation):
self.check_scheme(query, None)
|
"""Various tests of type safety."""
import unittest
from raco.fakedb import FakeDatabase
from raco.scheme import Scheme
from raco.myrial.myrial_test import MyrialTestCase
from collections import Counter
class TypeTests(MyrialTestCase):
schema = Scheme(
[("clong", "LONG_TYPE"),
("cint", "INT_TYPE"),
("cstring", "STRING_TYPE"),
("cfloat", "DOUBLE_TYPE")])
def setUp(self):
super(TypeTests, self).setUp()
self.db.ingest("public:adhoc:mytable", Counter(), TypeTests.schema)
def noop_test(self):
query = """
X = SCAN(public:adhoc:mytable);
STORE(X, OUTPUT);
"""
self.check_scheme(query, TypeTests.schema)
Test of invalid equality test"""Various tests of type safety."""
import unittest
from raco.fakedb import FakeDatabase
from raco.scheme import Scheme
from raco.myrial.myrial_test import MyrialTestCase
from raco.expression import TypeSafetyViolation
from collections import Counter
class TypeTests(MyrialTestCase):
schema = Scheme(
[("clong", "LONG_TYPE"),
("cint", "INT_TYPE"),
("cstring", "STRING_TYPE"),
("cfloat", "DOUBLE_TYPE")])
def setUp(self):
super(TypeTests, self).setUp()
self.db.ingest("public:adhoc:mytable", Counter(), TypeTests.schema)
def noop_test(self):
query = """
X = SCAN(public:adhoc:mytable);
STORE(X, OUTPUT);
"""
self.check_scheme(query, TypeTests.schema)
def invalid_eq(self):
query = """
X = [FROM SCAN(public:adhoc:mytable) AS X EMIT clong=cstring];
STORE(X, OUTPUT);
"""
with self.assertRaises(TypeSafetyViolation):
self.check_scheme(query, None)
|
<commit_before>"""Various tests of type safety."""
import unittest
from raco.fakedb import FakeDatabase
from raco.scheme import Scheme
from raco.myrial.myrial_test import MyrialTestCase
from collections import Counter
class TypeTests(MyrialTestCase):
schema = Scheme(
[("clong", "LONG_TYPE"),
("cint", "INT_TYPE"),
("cstring", "STRING_TYPE"),
("cfloat", "DOUBLE_TYPE")])
def setUp(self):
super(TypeTests, self).setUp()
self.db.ingest("public:adhoc:mytable", Counter(), TypeTests.schema)
def noop_test(self):
query = """
X = SCAN(public:adhoc:mytable);
STORE(X, OUTPUT);
"""
self.check_scheme(query, TypeTests.schema)
<commit_msg>Test of invalid equality test<commit_after>"""Various tests of type safety."""
import unittest
from raco.fakedb import FakeDatabase
from raco.scheme import Scheme
from raco.myrial.myrial_test import MyrialTestCase
from raco.expression import TypeSafetyViolation
from collections import Counter
class TypeTests(MyrialTestCase):
schema = Scheme(
[("clong", "LONG_TYPE"),
("cint", "INT_TYPE"),
("cstring", "STRING_TYPE"),
("cfloat", "DOUBLE_TYPE")])
def setUp(self):
super(TypeTests, self).setUp()
self.db.ingest("public:adhoc:mytable", Counter(), TypeTests.schema)
def noop_test(self):
query = """
X = SCAN(public:adhoc:mytable);
STORE(X, OUTPUT);
"""
self.check_scheme(query, TypeTests.schema)
def invalid_eq(self):
query = """
X = [FROM SCAN(public:adhoc:mytable) AS X EMIT clong=cstring];
STORE(X, OUTPUT);
"""
with self.assertRaises(TypeSafetyViolation):
self.check_scheme(query, None)
|
2048045b9b77d8cab88c0cab8e90cf72cb88b2a4
|
station.py
|
station.py
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = capacity
self.escalators = escalators
self.train_wait = train_wait
#self.arrivalrate = arrivalrate
#self.departurerate = departurerate
self.travelors_arriving = travelors_arriving
self.travelors_departing = travelors_departing
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
|
Add input parameters and test function
|
Add input parameters and test function
Added input parameters at time of instantiation.
Ref #23
|
Python
|
mit
|
ForestPride/rail-problem
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = capacity
self.escalators = escalators
self.train_wait = train_wait
#self.arrivalrate = arrivalrate
#self.departurerate = departurerate
self.travelors_arriving = travelors_arriving
self.travelors_departing = travelors_departing
Add input parameters and test function
Added input parameters at time of instantiation.
Ref #23
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
|
<commit_before>"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = capacity
self.escalators = escalators
self.train_wait = train_wait
#self.arrivalrate = arrivalrate
#self.departurerate = departurerate
self.travelors_arriving = travelors_arriving
self.travelors_departing = travelors_departing
<commit_msg>Add input parameters and test function
Added input parameters at time of instantiation.
Ref #23<commit_after>
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = capacity
self.escalators = escalators
self.train_wait = train_wait
#self.arrivalrate = arrivalrate
#self.departurerate = departurerate
self.travelors_arriving = travelors_arriving
self.travelors_departing = travelors_departing
Add input parameters and test function
Added input parameters at time of instantiation.
Ref #23"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
|
<commit_before>"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = capacity
self.escalators = escalators
self.train_wait = train_wait
#self.arrivalrate = arrivalrate
#self.departurerate = departurerate
self.travelors_arriving = travelors_arriving
self.travelors_departing = travelors_departing
<commit_msg>Add input parameters and test function
Added input parameters at time of instantiation.
Ref #23<commit_after>"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
|
0bcbd9656723726f1098899d61140a3f1a11c7ea
|
scripts/1d-load-images.py
|
scripts/1d-load-images.py
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, load the image meta data
# this script produces nothing other than loading some data and quitting.
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, load the image meta data
# this script produces nothing other than loading some data and quitting.
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--recompute-sizes', action='store_true', help='recompute image sizes')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info(force_compute_sizes=args.recompute_sizes)
|
Add an option to force recomputing of image dimensions.
|
Add an option to force recomputing of image dimensions.
|
Python
|
mit
|
UASLab/ImageAnalysis
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, load the image meta data
# this script produces nothing other than loading some data and quitting.
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
Add an option to force recomputing of image dimensions.
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, load the image meta data
# this script produces nothing other than loading some data and quitting.
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--recompute-sizes', action='store_true', help='recompute image sizes')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info(force_compute_sizes=args.recompute_sizes)
|
<commit_before>#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, load the image meta data
# this script produces nothing other than loading some data and quitting.
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
<commit_msg>Add an option to force recomputing of image dimensions.<commit_after>
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, load the image meta data
# this script produces nothing other than loading some data and quitting.
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--recompute-sizes', action='store_true', help='recompute image sizes')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info(force_compute_sizes=args.recompute_sizes)
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, load the image meta data
# this script produces nothing other than loading some data and quitting.
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
Add an option to force recomputing of image dimensions.#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, load the image meta data
# this script produces nothing other than loading some data and quitting.
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--recompute-sizes', action='store_true', help='recompute image sizes')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info(force_compute_sizes=args.recompute_sizes)
|
<commit_before>#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, load the image meta data
# this script produces nothing other than loading some data and quitting.
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
<commit_msg>Add an option to force recomputing of image dimensions.<commit_after>#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, load the image meta data
# this script produces nothing other than loading some data and quitting.
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--recompute-sizes', action='store_true', help='recompute image sizes')
args = parser.parse_args()
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info(force_compute_sizes=args.recompute_sizes)
|
f93f11f0369a5c263be5b8f078e188e7af630fba
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.1.dev0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.1'
|
Update dsub version to 0.4.1.
|
Update dsub version to 0.4.1.
PiperOrigin-RevId: 328637531
|
Python
|
apache-2.0
|
DataBiosphere/dsub,DataBiosphere/dsub
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.1.dev0'
Update dsub version to 0.4.1.
PiperOrigin-RevId: 328637531
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.1'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.1.dev0'
<commit_msg>Update dsub version to 0.4.1.
PiperOrigin-RevId: 328637531<commit_after>
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.1'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.1.dev0'
Update dsub version to 0.4.1.
PiperOrigin-RevId: 328637531# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.1'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.1.dev0'
<commit_msg>Update dsub version to 0.4.1.
PiperOrigin-RevId: 328637531<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.1'
|
fe397dccf389e31e6b39d5eaa77aedd266cef72d
|
Practice.py
|
Practice.py
|
print 2**3
print pow(2,3)
print abs(-10)
print round(1.536,2)
print 1/2
print 1.0//2.0
print 0xAF
print 010
|
print 2**3
print pow(2,3)
print abs(-10)
print round(1.536,2)
print 1/2
print 1.0//2.0
print 0xAF
print 010
import cmath
print cmath.sqrt(-1)
import math
print math.floor(32.8)
|
Test math and cmath module.
|
Test math and cmath module.
|
Python
|
apache-2.0
|
Vayne-Lover/Python
|
print 2**3
print pow(2,3)
print abs(-10)
print round(1.536,2)
print 1/2
print 1.0//2.0
print 0xAF
print 010
Test math and cmath module.
|
print 2**3
print pow(2,3)
print abs(-10)
print round(1.536,2)
print 1/2
print 1.0//2.0
print 0xAF
print 010
import cmath
print cmath.sqrt(-1)
import math
print math.floor(32.8)
|
<commit_before>print 2**3
print pow(2,3)
print abs(-10)
print round(1.536,2)
print 1/2
print 1.0//2.0
print 0xAF
print 010
<commit_msg>Test math and cmath module.<commit_after>
|
print 2**3
print pow(2,3)
print abs(-10)
print round(1.536,2)
print 1/2
print 1.0//2.0
print 0xAF
print 010
import cmath
print cmath.sqrt(-1)
import math
print math.floor(32.8)
|
print 2**3
print pow(2,3)
print abs(-10)
print round(1.536,2)
print 1/2
print 1.0//2.0
print 0xAF
print 010
Test math and cmath module.print 2**3
print pow(2,3)
print abs(-10)
print round(1.536,2)
print 1/2
print 1.0//2.0
print 0xAF
print 010
import cmath
print cmath.sqrt(-1)
import math
print math.floor(32.8)
|
<commit_before>print 2**3
print pow(2,3)
print abs(-10)
print round(1.536,2)
print 1/2
print 1.0//2.0
print 0xAF
print 010
<commit_msg>Test math and cmath module.<commit_after>print 2**3
print pow(2,3)
print abs(-10)
print round(1.536,2)
print 1/2
print 1.0//2.0
print 0xAF
print 010
import cmath
print cmath.sqrt(-1)
import math
print math.floor(32.8)
|
e4964832cf330f57c4ef6dc7be942d2533c840a7
|
breakpad.py
|
breakpad.py
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_tb = getattr(sys, 'last_traceback', None)
if last_tb and sys.last_type is not KeyboardInterrupt:
SendStack(''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
# Configure these values.
DEFAULT_URL = 'http://chromium-status.appspot.com/breakpad'
def SendStack(last_tb, stack, url=None):
if not url:
url = DEFAULT_URL
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
'exception': last_tb,
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_value = getattr(sys, 'last_value', None)
if last_value and not isinstance(last_value, KeyboardInterrupt):
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
Fix KeyboardInterrupt exception filtering. Add exception information and not just the stack trace. Make the url easier to change at runtime.
|
Fix KeyboardInterrupt exception filtering.
Add exception information and not just the stack trace.
Make the url easier to change at runtime.
Review URL: http://codereview.chromium.org/2109001
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@47179 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
Python
|
bsd-3-clause
|
svn2github/chromium-depot-tools,svn2github/chromium-depot-tools,svn2github/chromium-depot-tools
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_tb = getattr(sys, 'last_traceback', None)
if last_tb and sys.last_type is not KeyboardInterrupt:
SendStack(''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
Fix KeyboardInterrupt exception filtering.
Add exception information and not just the stack trace.
Make the url easier to change at runtime.
Review URL: http://codereview.chromium.org/2109001
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@47179 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
# Configure these values.
DEFAULT_URL = 'http://chromium-status.appspot.com/breakpad'
def SendStack(last_tb, stack, url=None):
# last_tb: repr() of the exception instance.  stack: formatted traceback
# text.  url: collector endpoint override; falls back to DEFAULT_URL so the
# destination can be changed at runtime by reassigning the module global.
if not url:
url = DEFAULT_URL
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
'exception': last_tb,
}
# Python 2 urllib.urlopen issues a POST when given url-encoded data.
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
# Reporting is best-effort; a network failure must never mask the crash.
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
    """atexit hook: upload exception info and stack if the run crashed.

    sys.last_value / sys.last_traceback exist only after an unhandled
    exception; KeyboardInterrupt (user abort) is deliberately not reported.
    """
    exc = getattr(sys, 'last_value', None)
    if not exc or isinstance(exc, KeyboardInterrupt):
        return
    tb = getattr(sys, 'last_traceback', None)
    if tb:
        SendStack(repr(exc), ''.join(traceback.format_tb(tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
<commit_before># Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_tb = getattr(sys, 'last_traceback', None)
if last_tb and sys.last_type is not KeyboardInterrupt:
SendStack(''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
<commit_msg>Fix KeyboardInterrupt exception filtering.
Add exception information and not just the stack trace.
Make the url easier to change at runtime.
Review URL: http://codereview.chromium.org/2109001
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@47179 4ff67af0-8c30-449e-8e8b-ad334ec8d88c<commit_after>
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
# Configure these values.
DEFAULT_URL = 'http://chromium-status.appspot.com/breakpad'
def SendStack(last_tb, stack, url=None):
if not url:
url = DEFAULT_URL
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
'exception': last_tb,
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_value = getattr(sys, 'last_value', None)
if last_value and not isinstance(last_value, KeyboardInterrupt):
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_tb = getattr(sys, 'last_traceback', None)
if last_tb and sys.last_type is not KeyboardInterrupt:
SendStack(''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
Fix KeyboardInterrupt exception filtering.
Add exception information and not just the stack trace.
Make the url easier to change at runtime.
Review URL: http://codereview.chromium.org/2109001
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@47179 4ff67af0-8c30-449e-8e8b-ad334ec8d88c# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
# Configure these values.
DEFAULT_URL = 'http://chromium-status.appspot.com/breakpad'
def SendStack(last_tb, stack, url=None):
if not url:
url = DEFAULT_URL
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
'exception': last_tb,
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_value = getattr(sys, 'last_value', None)
if last_value and not isinstance(last_value, KeyboardInterrupt):
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
<commit_before># Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_tb = getattr(sys, 'last_traceback', None)
if last_tb and sys.last_type is not KeyboardInterrupt:
SendStack(''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
<commit_msg>Fix KeyboardInterrupt exception filtering.
Add exception information and not just the stack trace.
Make the url easier to change at runtime.
Review URL: http://codereview.chromium.org/2109001
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@47179 4ff67af0-8c30-449e-8e8b-ad334ec8d88c<commit_after># Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
# Configure these values.
DEFAULT_URL = 'http://chromium-status.appspot.com/breakpad'
def SendStack(last_tb, stack, url=None):
if not url:
url = DEFAULT_URL
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
'exception': last_tb,
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_value = getattr(sys, 'last_value', None)
if last_value and not isinstance(last_value, KeyboardInterrupt):
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(repr(last_value), ''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
ad97fa93ad50bfb73c29798f9f1f24465c6a3683
|
_lua_paths.py
|
_lua_paths.py
|
import os
import re
# NOTE(review): despite the name, this pattern matches forward slashes ('/');
# it is used to turn relative file paths into dotted Lua module paths.
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
    """Longest common *directory* prefix of the given paths.

    os.path.commonprefix is character-wise, so everything after the last
    *sep* is dropped to avoid treating a partial path component as a
    directory.
    """
    raw = os.path.commonprefix(*args)
    head, _, _ = raw.rpartition(sep)
    return head
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view, followlinks):
    """Return (module_name, dotted_path) pairs for every .lua file reachable
    from the project's folders and from the directory containing main.lua.

    followlinks is forwarded to os.walk.
    """
    lua_paths = []
    search_paths = __getProjectPaths(view)
    # Fix: __getViewPath returns None when no main.lua is found anywhere
    # under the window's folders; os.walk(None) would then raise.  Only
    # append a real path.
    view_path = __getViewPath(view)
    if view_path is not None:
        search_paths.append(view_path)
    for path in search_paths:
        for root, dirs, files in os.walk(path, followlinks=followlinks):
            for name in files:
                # NOTE(review): substring match also hits e.g. 'foo.luac' or
                # 'x.lua.bak' -- presumably endswith('.lua') was intended;
                # kept as-is to preserve behavior.
                if ".lua" in name:
                    name = os.path.splitext(name)[0]
                    relpath = os.path.relpath(os.path.join(root, name), start=path)
                    lua_paths.append((name, _findBackslash.sub(".", relpath)))
    return lua_paths
|
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
    """Absolute 'path' entries from the Sublime window's project folders."""
    data = view.window().project_data()
    if data is None:
        return []
    collected = []
    # 'folders' may be missing entirely; each entry may lack 'path' or hold
    # a relative path -- both cases are skipped.
    for folder in data.get("folders", []):
        if "path" in folder and os.path.isabs(folder["path"]):
            collected.append(folder["path"])
    return collected
def __getViewPath(view):
    """Directory containing main.lua beneath the window's common root.

    Falls through (implicit None) when no main.lua exists under the root.
    """
    search_root = __commonprefix(view.window().folders())
    for dirpath, _dirnames, filenames in os.walk(search_root):
        if "main.lua" in filenames:
            return dirpath
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
viewPath=__getViewPath(view)
if viewPath is not None:
paths.append(viewPath)
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
|
Fix crash if view returns no valid path
|
Fix crash if view returns no valid path
|
Python
|
mit
|
coronalabs/CoronaSDK-SublimeText,coronalabs/CoronaSDK-SublimeText
|
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
paths.append(__getViewPath(view))
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPathsFix crash if view returns no valid path
|
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
viewPath=__getViewPath(view)
if viewPath is not None:
paths.append(viewPath)
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
|
<commit_before>import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
paths.append(__getViewPath(view))
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths<commit_msg>Fix crash if view returns no valid path<commit_after>
|
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
viewPath=__getViewPath(view)
if viewPath is not None:
paths.append(viewPath)
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
|
import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
paths.append(__getViewPath(view))
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPathsFix crash if view returns no valid pathimport os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
viewPath=__getViewPath(view)
if viewPath is not None:
paths.append(viewPath)
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
|
<commit_before>import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
paths.append(__getViewPath(view))
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths<commit_msg>Fix crash if view returns no valid path<commit_after>import os
import re
_findBackslash = re.compile("/")
# http://rosettacode.org/wiki/Find_common_directory_path#Python
def __commonprefix(*args, sep='/'):
return os.path.commonprefix(*args).rpartition(sep)[0]
def __getProjectPaths(view):
project_data=view.window().project_data()
if project_data is None:
return []
paths=[]
if "folders" in project_data:
folders=project_data["folders"]
for f in folders:
if "path" in f and os.path.isabs(f["path"]):
paths.append(f["path"])
return paths
def __getViewPath(view):
searchpath=__commonprefix(view.window().folders())
for root, dirs, files in os.walk(searchpath):
for name in files:
if "main.lua"==name:
return root
def getLuaFilesAndPaths(view,followlinks):
luaPaths=[]
paths=__getProjectPaths(view)
viewPath=__getViewPath(view)
if viewPath is not None:
paths.append(viewPath)
for path in paths:
for root, dirs, files in os.walk(path,followlinks=followlinks):
for name in files:
if ".lua" in name:
name=os.path.splitext(name)[0]
relpath=os.path.relpath(os.path.join(root, name),start=path)
luaPaths.append((name,_findBackslash.sub(".",relpath)))
return luaPaths
|
a46d2de6bb0a9605944b44971cc29126023e0623
|
commands.py
|
commands.py
|
from runcommands.commands import show_config # noqa: F401
from arctasks.base import lint # noqa: F401
from arctasks.python import show_upgraded_packages # noqa: F401
from arctasks.release import * # noqa: F401,F403
|
from runcommands.commands import show_config # noqa: F401
from arctasks.base import install, lint # noqa: F401
from arctasks.python import show_upgraded_packages # noqa: F401
from arctasks.release import * # noqa: F401,F403
|
Add install command (import from ARCTasks)
|
Add install command (import from ARCTasks)
Because `run install -u` is so much easier to type than
`pip install -U -r requirements.txt`.
|
Python
|
mit
|
wylee/django-arcutils,PSU-OIT-ARC/django-arcutils,wylee/django-arcutils,PSU-OIT-ARC/django-arcutils
|
from runcommands.commands import show_config # noqa: F401
from arctasks.base import lint # noqa: F401
from arctasks.python import show_upgraded_packages # noqa: F401
from arctasks.release import * # noqa: F401,F403
Add install command (import from ARCTasks)
Because `run install -u` is so much easier to type than
`pip install -U -r requirements.txt`.
|
from runcommands.commands import show_config # noqa: F401
from arctasks.base import install, lint # noqa: F401
from arctasks.python import show_upgraded_packages # noqa: F401
from arctasks.release import * # noqa: F401,F403
|
<commit_before>from runcommands.commands import show_config # noqa: F401
from arctasks.base import lint # noqa: F401
from arctasks.python import show_upgraded_packages # noqa: F401
from arctasks.release import * # noqa: F401,F403
<commit_msg>Add install command (import from ARCTasks)
Because `run install -u` is so much easier to type than
`pip install -U -r requirements.txt`.<commit_after>
|
from runcommands.commands import show_config # noqa: F401
from arctasks.base import install, lint # noqa: F401
from arctasks.python import show_upgraded_packages # noqa: F401
from arctasks.release import * # noqa: F401,F403
|
from runcommands.commands import show_config # noqa: F401
from arctasks.base import lint # noqa: F401
from arctasks.python import show_upgraded_packages # noqa: F401
from arctasks.release import * # noqa: F401,F403
Add install command (import from ARCTasks)
Because `run install -u` is so much easier to type than
`pip install -U -r requirements.txt`.from runcommands.commands import show_config # noqa: F401
from arctasks.base import install, lint # noqa: F401
from arctasks.python import show_upgraded_packages # noqa: F401
from arctasks.release import * # noqa: F401,F403
|
<commit_before>from runcommands.commands import show_config # noqa: F401
from arctasks.base import lint # noqa: F401
from arctasks.python import show_upgraded_packages # noqa: F401
from arctasks.release import * # noqa: F401,F403
<commit_msg>Add install command (import from ARCTasks)
Because `run install -u` is so much easier to type than
`pip install -U -r requirements.txt`.<commit_after>from runcommands.commands import show_config # noqa: F401
from arctasks.base import install, lint # noqa: F401
from arctasks.python import show_upgraded_packages # noqa: F401
from arctasks.release import * # noqa: F401,F403
|
d18919060fde86baaa1bd6fed561872dfe4cc37f
|
oam_base/urls.py
|
oam_base/urls.py
|
from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from MyInfo import views as my_info_views
from django_cas import views as cas_views
from oam_base import views as base_views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
# URL routing for the OAM project.  Every directory-style pattern ends in a
# trailing slash so Django's APPEND_SLASH redirect behaves uniformly.
urlpatterns = patterns('',
    url(r'^$', my_info_views.index, name='index'),
    url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')),
    url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')),
    url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')),
    url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'),
    url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'),
    # Fix: trailing slash added for consistency with every other terminal
    # pattern above (was r'^error/denied$').
    url(r'^error/denied/$', base_views.rate_limited, name='rate_limited'),
    url(r'^ajax/', include('ajax.urls')),
    url(r'^admin/', include(admin.site.urls)),
)

# Dotted path to the custom server-error view, resolved by Django.
handler500 = 'oam_base.views.custom_error'
|
from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from MyInfo import views as my_info_views
from django_cas import views as cas_views
from oam_base import views as base_views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', my_info_views.index, name='index'),
url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')),
url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')),
url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')),
url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'),
url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'),
url(r'^error/denied/$', base_views.rate_limited, name='rate_limited'),
url(r'^ajax/', include('ajax.urls')),
url(r'^admin/', include(admin.site.urls)),
)
handler500 = 'oam_base.views.custom_error'
|
Make the ratelimited error URL follow established conventions.
|
Make the ratelimited error URL follow established conventions.
|
Python
|
mit
|
hhauer/myinfo,hhauer/myinfo,hhauer/myinfo,hhauer/myinfo
|
from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from MyInfo import views as my_info_views
from django_cas import views as cas_views
from oam_base import views as base_views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', my_info_views.index, name='index'),
url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')),
url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')),
url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')),
url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'),
url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'),
url(r'^error/denied$', base_views.rate_limited, name='rate_limited'),
url(r'^ajax/', include('ajax.urls')),
url(r'^admin/', include(admin.site.urls)),
)
handler500 = 'oam_base.views.custom_error'Make the ratelimited error URL follow established conventions.
|
from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from MyInfo import views as my_info_views
from django_cas import views as cas_views
from oam_base import views as base_views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', my_info_views.index, name='index'),
url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')),
url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')),
url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')),
url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'),
url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'),
url(r'^error/denied/$', base_views.rate_limited, name='rate_limited'),
url(r'^ajax/', include('ajax.urls')),
url(r'^admin/', include(admin.site.urls)),
)
handler500 = 'oam_base.views.custom_error'
|
<commit_before>from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from MyInfo import views as my_info_views
from django_cas import views as cas_views
from oam_base import views as base_views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', my_info_views.index, name='index'),
url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')),
url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')),
url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')),
url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'),
url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'),
url(r'^error/denied$', base_views.rate_limited, name='rate_limited'),
url(r'^ajax/', include('ajax.urls')),
url(r'^admin/', include(admin.site.urls)),
)
handler500 = 'oam_base.views.custom_error'<commit_msg>Make the ratelimited error URL follow established conventions.<commit_after>
|
from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from MyInfo import views as my_info_views
from django_cas import views as cas_views
from oam_base import views as base_views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', my_info_views.index, name='index'),
url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')),
url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')),
url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')),
url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'),
url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'),
url(r'^error/denied/$', base_views.rate_limited, name='rate_limited'),
url(r'^ajax/', include('ajax.urls')),
url(r'^admin/', include(admin.site.urls)),
)
handler500 = 'oam_base.views.custom_error'
|
from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from MyInfo import views as my_info_views
from django_cas import views as cas_views
from oam_base import views as base_views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', my_info_views.index, name='index'),
url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')),
url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')),
url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')),
url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'),
url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'),
url(r'^error/denied$', base_views.rate_limited, name='rate_limited'),
url(r'^ajax/', include('ajax.urls')),
url(r'^admin/', include(admin.site.urls)),
)
handler500 = 'oam_base.views.custom_error'Make the ratelimited error URL follow established conventions.from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from MyInfo import views as my_info_views
from django_cas import views as cas_views
from oam_base import views as base_views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', my_info_views.index, name='index'),
url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')),
url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')),
url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')),
url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'),
url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'),
url(r'^error/denied/$', base_views.rate_limited, name='rate_limited'),
url(r'^ajax/', include('ajax.urls')),
url(r'^admin/', include(admin.site.urls)),
)
handler500 = 'oam_base.views.custom_error'
|
<commit_before>from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from MyInfo import views as my_info_views
from django_cas import views as cas_views
from oam_base import views as base_views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', my_info_views.index, name='index'),
url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')),
url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')),
url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')),
url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'),
url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'),
url(r'^error/denied$', base_views.rate_limited, name='rate_limited'),
url(r'^ajax/', include('ajax.urls')),
url(r'^admin/', include(admin.site.urls)),
)
handler500 = 'oam_base.views.custom_error'<commit_msg>Make the ratelimited error URL follow established conventions.<commit_after>from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse_lazy
from MyInfo import views as my_info_views
from django_cas import views as cas_views
from oam_base import views as base_views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', my_info_views.index, name='index'),
url(r'^MyInfo/', include('MyInfo.urls', namespace='MyInfo')),
url(r'^AccountPickup/', include('AccountPickup.urls', namespace='AccountPickup')),
url(r'^PasswordReset/', include('PasswordReset.urls', namespace='PasswordReset')),
url(r'^accounts/login/$', cas_views.login, {'next_page': reverse_lazy('AccountPickup:next_step')}, name='CASLogin'),
url(r'^accounts/logout/$', cas_views.logout, name='CASLogout'),
url(r'^error/denied/$', base_views.rate_limited, name='rate_limited'),
url(r'^ajax/', include('ajax.urls')),
url(r'^admin/', include(admin.site.urls)),
)
handler500 = 'oam_base.views.custom_error'
|
a0151bb7934beac7f5db3c79c60d7335a594d29a
|
alg_minmax.py
|
alg_minmax.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def find_min_max_naive(a_ls):
"""Find mix & max in a list by naive method."""
_min = a_ls[0]
_max = a_ls[0]
for i in range(1, len(a_ls)):
_min = min(_min, a_ls[i])
_max = max(_max, a_ls[i])
return [_min, _max]
def main():
# a_ls = [1, 2, 3, 4, 5, 6, 7, 8]
a_ls = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print('A list: {}'.format(a_ls))
print('Find min & max:')
print('By naive method: {}'
.format(find_min_max_naive(a_ls)))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def find_min_max_naive(a_ls):
"""Find mix & max in a list by naive method."""
_min = a_ls[0]
_max = a_ls[0]
for i in range(1, len(a_ls)):
_min = min(_min, a_ls[i])
_max = max(_max, a_ls[i])
return [_min, _max]
def find_min_max_dc(a_ls):
"""Find mix & max in a list by divide and conquer method."""
if len(a_ls) == 1:
return [a_ls[0], a_ls[0]]
elif 1 < len(a_ls) < 3:
if a_ls[0] < a_ls[1]:
return [a_ls[0], a_ls[1]]
else:
return [a_ls[1], a_ls[0]]
else:
[_min1, _max1] = find_min_max_dc(a_ls[:len(a_ls)//2])
[_min2, _max2] = find_min_max_dc(a_ls[len(a_ls)//2:])
return [min(_min1, _min2), max(_max1, _max2)]
def main():
# a_ls = [1, 2, 3, 4, 5, 6, 7, 8]
a_ls = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print('A list: {}'.format(a_ls))
print('Find min & max:')
print('By naive method: {}'
.format(find_min_max_naive(a_ls)))
print('By divide and conquer method: {}'
.format(find_min_max_dc(a_ls)))
if __name__ == '__main__':
main()
|
Complete find_min_max_dc() with divide and conquer method
|
Complete find_min_max_dc() with divide and conquer method
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def find_min_max_naive(a_ls):
"""Find mix & max in a list by naive method."""
_min = a_ls[0]
_max = a_ls[0]
for i in range(1, len(a_ls)):
_min = min(_min, a_ls[i])
_max = max(_max, a_ls[i])
return [_min, _max]
def main():
# a_ls = [1, 2, 3, 4, 5, 6, 7, 8]
a_ls = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print('A list: {}'.format(a_ls))
print('Find min & max:')
print('By naive method: {}'
.format(find_min_max_naive(a_ls)))
if __name__ == '__main__':
main()
Complete find_min_max_dc() with divide and conquer method
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def find_min_max_naive(a_ls):
"""Find mix & max in a list by naive method."""
_min = a_ls[0]
_max = a_ls[0]
for i in range(1, len(a_ls)):
_min = min(_min, a_ls[i])
_max = max(_max, a_ls[i])
return [_min, _max]
def find_min_max_dc(a_ls):
"""Find mix & max in a list by divide and conquer method."""
if len(a_ls) == 1:
return [a_ls[0], a_ls[0]]
elif 1 < len(a_ls) < 3:
if a_ls[0] < a_ls[1]:
return [a_ls[0], a_ls[1]]
else:
return [a_ls[1], a_ls[0]]
else:
[_min1, _max1] = find_min_max_dc(a_ls[:len(a_ls)//2])
[_min2, _max2] = find_min_max_dc(a_ls[len(a_ls)//2:])
return [min(_min1, _min2), max(_max1, _max2)]
def main():
# a_ls = [1, 2, 3, 4, 5, 6, 7, 8]
a_ls = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print('A list: {}'.format(a_ls))
print('Find min & max:')
print('By naive method: {}'
.format(find_min_max_naive(a_ls)))
print('By divide and conquer method: {}'
.format(find_min_max_dc(a_ls)))
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def find_min_max_naive(a_ls):
"""Find mix & max in a list by naive method."""
_min = a_ls[0]
_max = a_ls[0]
for i in range(1, len(a_ls)):
_min = min(_min, a_ls[i])
_max = max(_max, a_ls[i])
return [_min, _max]
def main():
# a_ls = [1, 2, 3, 4, 5, 6, 7, 8]
a_ls = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print('A list: {}'.format(a_ls))
print('Find min & max:')
print('By naive method: {}'
.format(find_min_max_naive(a_ls)))
if __name__ == '__main__':
main()
<commit_msg>Complete find_min_max_dc() with divide and conquer method<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def find_min_max_naive(a_ls):
"""Find mix & max in a list by naive method."""
_min = a_ls[0]
_max = a_ls[0]
for i in range(1, len(a_ls)):
_min = min(_min, a_ls[i])
_max = max(_max, a_ls[i])
return [_min, _max]
def find_min_max_dc(a_ls):
"""Find mix & max in a list by divide and conquer method."""
if len(a_ls) == 1:
return [a_ls[0], a_ls[0]]
elif 1 < len(a_ls) < 3:
if a_ls[0] < a_ls[1]:
return [a_ls[0], a_ls[1]]
else:
return [a_ls[1], a_ls[0]]
else:
[_min1, _max1] = find_min_max_dc(a_ls[:len(a_ls)//2])
[_min2, _max2] = find_min_max_dc(a_ls[len(a_ls)//2:])
return [min(_min1, _min2), max(_max1, _max2)]
def main():
# a_ls = [1, 2, 3, 4, 5, 6, 7, 8]
a_ls = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print('A list: {}'.format(a_ls))
print('Find min & max:')
print('By naive method: {}'
.format(find_min_max_naive(a_ls)))
print('By divide and conquer method: {}'
.format(find_min_max_dc(a_ls)))
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def find_min_max_naive(a_ls):
"""Find mix & max in a list by naive method."""
_min = a_ls[0]
_max = a_ls[0]
for i in range(1, len(a_ls)):
_min = min(_min, a_ls[i])
_max = max(_max, a_ls[i])
return [_min, _max]
def main():
# a_ls = [1, 2, 3, 4, 5, 6, 7, 8]
a_ls = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print('A list: {}'.format(a_ls))
print('Find min & max:')
print('By naive method: {}'
.format(find_min_max_naive(a_ls)))
if __name__ == '__main__':
main()
Complete find_min_max_dc() with divide and conquer methodfrom __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def find_min_max_naive(a_ls):
"""Find mix & max in a list by naive method."""
_min = a_ls[0]
_max = a_ls[0]
for i in range(1, len(a_ls)):
_min = min(_min, a_ls[i])
_max = max(_max, a_ls[i])
return [_min, _max]
def find_min_max_dc(a_ls):
"""Find mix & max in a list by divide and conquer method."""
if len(a_ls) == 1:
return [a_ls[0], a_ls[0]]
elif 1 < len(a_ls) < 3:
if a_ls[0] < a_ls[1]:
return [a_ls[0], a_ls[1]]
else:
return [a_ls[1], a_ls[0]]
else:
[_min1, _max1] = find_min_max_dc(a_ls[:len(a_ls)//2])
[_min2, _max2] = find_min_max_dc(a_ls[len(a_ls)//2:])
return [min(_min1, _min2), max(_max1, _max2)]
def main():
# a_ls = [1, 2, 3, 4, 5, 6, 7, 8]
a_ls = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print('A list: {}'.format(a_ls))
print('Find min & max:')
print('By naive method: {}'
.format(find_min_max_naive(a_ls)))
print('By divide and conquer method: {}'
.format(find_min_max_dc(a_ls)))
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def find_min_max_naive(a_ls):
"""Find mix & max in a list by naive method."""
_min = a_ls[0]
_max = a_ls[0]
for i in range(1, len(a_ls)):
_min = min(_min, a_ls[i])
_max = max(_max, a_ls[i])
return [_min, _max]
def main():
# a_ls = [1, 2, 3, 4, 5, 6, 7, 8]
a_ls = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print('A list: {}'.format(a_ls))
print('Find min & max:')
print('By naive method: {}'
.format(find_min_max_naive(a_ls)))
if __name__ == '__main__':
main()
<commit_msg>Complete find_min_max_dc() with divide and conquer method<commit_after>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def find_min_max_naive(a_ls):
"""Find mix & max in a list by naive method."""
_min = a_ls[0]
_max = a_ls[0]
for i in range(1, len(a_ls)):
_min = min(_min, a_ls[i])
_max = max(_max, a_ls[i])
return [_min, _max]
def find_min_max_dc(a_ls):
"""Find mix & max in a list by divide and conquer method."""
if len(a_ls) == 1:
return [a_ls[0], a_ls[0]]
elif 1 < len(a_ls) < 3:
if a_ls[0] < a_ls[1]:
return [a_ls[0], a_ls[1]]
else:
return [a_ls[1], a_ls[0]]
else:
[_min1, _max1] = find_min_max_dc(a_ls[:len(a_ls)//2])
[_min2, _max2] = find_min_max_dc(a_ls[len(a_ls)//2:])
return [min(_min1, _min2), max(_max1, _max2)]
def main():
# a_ls = [1, 2, 3, 4, 5, 6, 7, 8]
a_ls = [1, 2, 3, 4, 5, 6, 7, 8, 9]
print('A list: {}'.format(a_ls))
print('Find min & max:')
print('By naive method: {}'
.format(find_min_max_naive(a_ls)))
print('By divide and conquer method: {}'
.format(find_min_max_dc(a_ls)))
if __name__ == '__main__':
main()
|
85bba7e080100ed7154330c8b0d1476bc3718e28
|
one_time_eval.py
|
one_time_eval.py
|
# usage: python one_time_eval.py as8sqdtc
# usage: python one_time_eval.py as8sqdtc 2skskd
from convenience import find_pcts, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
print find_pcts(p[0], p[1], board, iter = 10000)
|
# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 10000)
print [round(x, 4) for x in percents]
|
Add support for multi-way pots.
|
Add support for multi-way pots.
|
Python
|
mit
|
zimolzak/poker-experiments,zimolzak/poker-experiments,zimolzak/poker-experiments
|
# usage: python one_time_eval.py as8sqdtc
# usage: python one_time_eval.py as8sqdtc 2skskd
from convenience import find_pcts, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
print find_pcts(p[0], p[1], board, iter = 10000)
Add support for multi-way pots.
|
# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 10000)
print [round(x, 4) for x in percents]
|
<commit_before># usage: python one_time_eval.py as8sqdtc
# usage: python one_time_eval.py as8sqdtc 2skskd
from convenience import find_pcts, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
print find_pcts(p[0], p[1], board, iter = 10000)
<commit_msg>Add support for multi-way pots.<commit_after>
|
# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 10000)
print [round(x, 4) for x in percents]
|
# usage: python one_time_eval.py as8sqdtc
# usage: python one_time_eval.py as8sqdtc 2skskd
from convenience import find_pcts, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
print find_pcts(p[0], p[1], board, iter = 10000)
Add support for multi-way pots.# usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 10000)
print [round(x, 4) for x in percents]
|
<commit_before># usage: python one_time_eval.py as8sqdtc
# usage: python one_time_eval.py as8sqdtc 2skskd
from convenience import find_pcts, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
print find_pcts(p[0], p[1], board, iter = 10000)
<commit_msg>Add support for multi-way pots.<commit_after># usage: python one_time_eval.py hole_cards [board_cards]
# examples:
# python one_time_eval.py as8sqdtc
# python one_time_eval.py as8sqdtc 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd
# python one_time_eval.py as8sqdtc3d3c 2skskd3h5s
from convenience import find_pcts_multi, pr, str2cards
import sys
## argv to strings
hole_cards_str = sys.argv[1]
board_str = ''
if len(sys.argv) > 2:
board_str = sys.argv[2]
## strings to lists of Card objects
hole_cards = str2cards(hole_cards_str)
board = str2cards(board_str)
## hole card list to player list-of-lists
assert len(hole_cards) % 2 == 0
n_players = len(hole_cards) / 2
assert n_players > 1
p = []
for i in range(n_players):
pi = hole_cards[i * 2 : i * 2 + 2]
pr(pi)
p.append(pi)
print "Board",
pr(board)
percents = find_pcts_multi(p, board, iter = 10000)
print [round(x, 4) for x in percents]
|
c5e319363727f332b04ac863e494cb04c52c91b5
|
drupal/Revert.py
|
drupal/Revert.py
|
from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
|
from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
stable_build = run("readlink /var/www/live.%s.%s" % (repo, branch))
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
|
Add back the stable_build variable which is needed in the _revert_settings() function. Woops.
|
Add back the stable_build variable which is needed in the _revert_settings() function. Woops.
|
Python
|
mit
|
codeenigma/deployments,codeenigma/deployments,codeenigma/deployments,codeenigma/deployments
|
from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
Add back the stable_build variable which is needed in the _revert_settings() function. Woops.
|
from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
stable_build = run("readlink /var/www/live.%s.%s" % (repo, branch))
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
|
<commit_before>from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
<commit_msg>Add back the stable_build variable which is needed in the _revert_settings() function. Woops.<commit_after>
|
from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
stable_build = run("readlink /var/www/live.%s.%s" % (repo, branch))
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
|
from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
Add back the stable_build variable which is needed in the _revert_settings() function. Woops.from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
stable_build = run("readlink /var/www/live.%s.%s" % (repo, branch))
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
|
<commit_before>from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
<commit_msg>Add back the stable_build variable which is needed in the _revert_settings() function. Woops.<commit_after>from fabric.api import *
from fabric.contrib.files import sed
import random
import string
import time
# Custom Code Enigma modules
import Drupal
import common.MySQL
# Small function to revert db
@task
@roles('app_primary')
def _revert_db(repo, branch, build, buildtype, site):
print "===> Reverting the database..."
drush_runtime_location = "/var/www/live.%s.%s/www/sites/%s" % (repo, branch, site)
drush_output = Drupal.drush_status(repo, branch, build, buildtype, site, drush_runtime_location)
db_name = Drupal.get_db_name(repo, branch, build, buildtype, site, drush_output)
common.MySQL.mysql_revert_db(db_name, build)
# Function to revert settings.php change for when a build fails and database is reverted
@task
@roles('app_all')
def _revert_settings(repo, branch, build, buildtype, site, alias):
print "===> Reverting the settings..."
with settings(warn_only=True):
stable_build = run("readlink /var/www/live.%s.%s" % (repo, branch))
if sudo('sed -i.bak "s:/var/www/.*\.settings\.php:%s/www/sites/%s/%s.settings.php:g" %s' % (stable_build, site, buildtype, settings_file)).failed:
print "===> Could not revert settings.php. Manual intervention required."
else:
print "===> Reverted settings.php"
|
d600fc56127f234a7a14b4a89be14b5c31b072e7
|
examples/edge_test.py
|
examples/edge_test.py
|
"""
This test is only for Microsoft Edge (Chromium)!
"""
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.assert_element('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div')
self.highlight('#section_about div + div > div')
self.highlight('img[alt="Edge logo"]')
self.highlight('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div > div + div')
self.highlight('#section_about div + div > div + div + div > div')
|
"""
This test is only for Microsoft Edge (Chromium)!
(Tested on Edge Version 89.0.774.54)
"""
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.highlight('div[role="main"]')
self.highlight('img[srcset*="logo"]')
self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
self.highlight('span[aria-live="assertive"]')
self.highlight('a[href*="chromium"]')
|
Update the Edge example test
|
Update the Edge example test
|
Python
|
mit
|
mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase
|
"""
This test is only for Microsoft Edge (Chromium)!
"""
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.assert_element('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div')
self.highlight('#section_about div + div > div')
self.highlight('img[alt="Edge logo"]')
self.highlight('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div > div + div')
self.highlight('#section_about div + div > div + div + div > div')
Update the Edge example test
|
"""
This test is only for Microsoft Edge (Chromium)!
(Tested on Edge Version 89.0.774.54)
"""
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.highlight('div[role="main"]')
self.highlight('img[srcset*="logo"]')
self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
self.highlight('span[aria-live="assertive"]')
self.highlight('a[href*="chromium"]')
|
<commit_before>"""
This test is only for Microsoft Edge (Chromium)!
"""
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.assert_element('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div')
self.highlight('#section_about div + div > div')
self.highlight('img[alt="Edge logo"]')
self.highlight('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div > div + div')
self.highlight('#section_about div + div > div + div + div > div')
<commit_msg>Update the Edge example test<commit_after>
|
"""
This test is only for Microsoft Edge (Chromium)!
(Tested on Edge Version 89.0.774.54)
"""
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.highlight('div[role="main"]')
self.highlight('img[srcset*="logo"]')
self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
self.highlight('span[aria-live="assertive"]')
self.highlight('a[href*="chromium"]')
|
"""
This test is only for Microsoft Edge (Chromium)!
"""
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.assert_element('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div')
self.highlight('#section_about div + div > div')
self.highlight('img[alt="Edge logo"]')
self.highlight('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div > div + div')
self.highlight('#section_about div + div > div + div + div > div')
Update the Edge example test"""
This test is only for Microsoft Edge (Chromium)!
(Tested on Edge Version 89.0.774.54)
"""
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.highlight('div[role="main"]')
self.highlight('img[srcset*="logo"]')
self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
self.highlight('span[aria-live="assertive"]')
self.highlight('a[href*="chromium"]')
|
<commit_before>"""
This test is only for Microsoft Edge (Chromium)!
"""
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.assert_element('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div')
self.highlight('#section_about div + div > div')
self.highlight('img[alt="Edge logo"]')
self.highlight('img[alt="Edge logo"] + span')
self.highlight('#section_about div + div > div + div')
self.highlight('#section_about div + div > div + div + div > div')
<commit_msg>Update the Edge example test<commit_after>"""
This test is only for Microsoft Edge (Chromium)!
(Tested on Edge Version 89.0.774.54)
"""
from seleniumbase import BaseCase
class EdgeTests(BaseCase):
def test_edge(self):
if self.browser != "edge":
print("\n This test is only for Microsoft Edge (Chromium)!")
print(' (Run this test using "--edge" or "--browser=edge")')
self.skip('Use "--edge" or "--browser=edge"')
self.open("edge://settings/help")
self.highlight('div[role="main"]')
self.highlight('img[srcset*="logo"]')
self.assert_text("Microsoft Edge", 'img[srcset*="logo"] + div')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(1)')
self.highlight('img[srcset*="logo"] + div span:nth-of-type(2)')
self.highlight('span[aria-live="assertive"]')
self.highlight('a[href*="chromium"]')
|
166fd73f5ac4501b9357d0263e6b68888836588a
|
tests/test_cookiecutter_invocation.py
|
tests/test_cookiecutter_invocation.py
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "TEMPLATE".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
@pytest.mark.usefixtures('clean_system')
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(monkeypatch, capfd):
monkeypatch.setenv('PYTHONPATH', '.')
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call([sys.executable, '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "TEMPLATE".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
@pytest.mark.usefixtures('clean_system')
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
Use sys.executable when invoking python interpreter from tests
|
Use sys.executable when invoking python interpreter from tests
When we only have python3 installed, the test for missing argument is
failing because there is no "python" executable. Use `sys.executable`
instead. Also set environment correctly, like done in 7024d3b36176.
|
Python
|
bsd-3-clause
|
audreyr/cookiecutter,hackebrot/cookiecutter,pjbull/cookiecutter,michaeljoseph/cookiecutter,michaeljoseph/cookiecutter,luzfcb/cookiecutter,hackebrot/cookiecutter,audreyr/cookiecutter,pjbull/cookiecutter,luzfcb/cookiecutter
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "TEMPLATE".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
@pytest.mark.usefixtures('clean_system')
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
Use sys.executable when invoking python interpreter from tests
When we only have python3 installed, the test for missing argument is
failing because there is no "python" executable. Use `sys.executable`
instead. Also set environment correctly, like done in 7024d3b36176.
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(monkeypatch, capfd):
monkeypatch.setenv('PYTHONPATH', '.')
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call([sys.executable, '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "TEMPLATE".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
@pytest.mark.usefixtures('clean_system')
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
<commit_before># -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "TEMPLATE".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
@pytest.mark.usefixtures('clean_system')
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
<commit_msg>Use sys.executable when invoking python interpreter from tests
When we only have python3 installed, the test for missing argument is
failing because there is no "python" executable. Use `sys.executable`
instead. Also set environment correctly, like done in 7024d3b36176.<commit_after>
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(monkeypatch, capfd):
monkeypatch.setenv('PYTHONPATH', '.')
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call([sys.executable, '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "TEMPLATE".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
@pytest.mark.usefixtures('clean_system')
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "TEMPLATE".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
@pytest.mark.usefixtures('clean_system')
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
Use sys.executable when invoking python interpreter from tests
When we only have python3 installed, the test for missing argument is
failing because there is no "python" executable. Use `sys.executable`
instead. Also set environment correctly, like done in 7024d3b36176.# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(monkeypatch, capfd):
monkeypatch.setenv('PYTHONPATH', '.')
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call([sys.executable, '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "TEMPLATE".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
@pytest.mark.usefixtures('clean_system')
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
<commit_before># -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "TEMPLATE".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
@pytest.mark.usefixtures('clean_system')
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
<commit_msg>Use sys.executable when invoking python interpreter from tests
When we only have python3 installed, the test for missing argument is
failing because there is no "python" executable. Use `sys.executable`
instead. Also set environment correctly, like done in 7024d3b36176.<commit_after># -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(monkeypatch, capfd):
monkeypatch.setenv('PYTHONPATH', '.')
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call([sys.executable, '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "TEMPLATE".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
@pytest.mark.usefixtures('clean_system')
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
5eef9eaf6fabe00281c480fd5886917596066a50
|
buildcert.py
|
buildcert.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = 'no-reply@ca.berlin.freifunk.net', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/client/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = 'no-reply@ca.berlin.freifunk.net', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/clients/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
|
Fix spelling mistake in path for certificates
|
Fix spelling mistake in path for certificates
|
Python
|
mit
|
freifunk-berlin/ca.berlin.freifunk.net,freifunk-berlin/ca.berlin.freifunk.net,freifunk-berlin/ca.berlin.freifunk.net
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = 'no-reply@ca.berlin.freifunk.net', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/client/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
Fix spelling mistake in path for certificates
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = 'no-reply@ca.berlin.freifunk.net', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/clients/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = 'no-reply@ca.berlin.freifunk.net', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/client/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
<commit_msg>Fix spelling mistake in path for certificates<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = 'no-reply@ca.berlin.freifunk.net', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/clients/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = 'no-reply@ca.berlin.freifunk.net', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/client/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
Fix spelling mistake in path for certificates#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = 'no-reply@ca.berlin.freifunk.net', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/clients/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = 'no-reply@ca.berlin.freifunk.net', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/client/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
<commit_msg>Fix spelling mistake in path for certificates<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import datetime
from subprocess import call
from ca import app, db, mail
from ca.models import Request
from flask import Flask, render_template
from flask_mail import Message
def mail_certificate(id, email):
msg = Message('Freifunk Vpn03 Key', sender = 'no-reply@ca.berlin.freifunk.net', recipients = [email])
msg.body = render_template('mail.txt')
with app.open_resource("/etc/openvpn/clients/freifunk_{}.tgz".format(id)) as fp:
msg.attach("freifunk_{}.tgz".format(id), "application/gzip", fp.read())
mail.send(msg)
for request in Request.query.filter(Request.generation_date == None).all(): # noqa
prompt = "Do you want to generate a certificate for {}, {} ?"
print(prompt.format(request.id, request.email))
print("Type y to continue")
confirm = input('>')
if confirm in ['Y', 'y']:
print('generating certificate')
call([app.config['COMMAND_BUILD'], request.id, request.email])
#call([app.config['COMMAND_MAIL'], request.id, request.email])
mail_certificate(request.id, request.email)
request.generation_date = datetime.date.today()
db.session.commit()
print()
else:
print('skipping generation \n')
|
f48344f8b961971eb0946bcb4066df021f3eef8a
|
tinysrt.py
|
tinysrt.py
|
#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt_file_handle):
for match in SUBTITLE_REGEX.finditer(srt_file_handle.read()):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
|
#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
|
Allow passing a string since we're just going to .read() the FH
|
parse(): Allow passing a string since we're just going to .read() the FH
|
Python
|
mit
|
cdown/srt
|
#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt_file_handle):
for match in SUBTITLE_REGEX.finditer(srt_file_handle.read()):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
parse(): Allow passing a string since we're just going to .read() the FH
|
#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
|
<commit_before>#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt_file_handle):
for match in SUBTITLE_REGEX.finditer(srt_file_handle.read()):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
<commit_msg>parse(): Allow passing a string since we're just going to .read() the FH<commit_after>
|
#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
|
#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt_file_handle):
for match in SUBTITLE_REGEX.finditer(srt_file_handle.read()):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
parse(): Allow passing a string since we're just going to .read() the FH#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
|
<commit_before>#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt_file_handle):
for match in SUBTITLE_REGEX.finditer(srt_file_handle.read()):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
<commit_msg>parse(): Allow passing a string since we're just going to .read() the FH<commit_after>#!/usr/bin/env python
import re
import datetime
from collections import namedtuple
SUBTITLE_REGEX = re.compile(r'''\
(\d+)
(\d+:\d+:\d+,\d+) --> (\d+:\d+:\d+,\d+)
(.+)
''')
Subtitle = namedtuple('Subtitle', ['index', 'start', 'end', 'content'])
def parse_time(time):
hours, minutes, seconds, milliseconds = map(int, re.split('[,:]', time))
return datetime.timedelta(
hours=hours,
minutes=minutes,
seconds=seconds,
milliseconds=milliseconds,
)
def parse(srt):
for match in SUBTITLE_REGEX.finditer(srt):
raw_index, raw_start, raw_end, content = match.groups()
yield Subtitle(
index=int(raw_index), start=parse_time(raw_start),
end=parse_time(raw_end), content=content,
)
|
19a96cb5b687e580bd3bda348a47255394da7826
|
tools/telemetry/telemetry/core/platform/power_monitor/ippet_power_monitor_unittest.py
|
tools/telemetry/telemetry/core/platform/power_monitor/ippet_power_monitor_unittest.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import decorators
from telemetry.core.platform import win_platform_backend
from telemetry.core.platform.power_monitor import ippet_power_monitor
class IppetPowerMonitorTest(unittest.TestCase):
@decorators.Enabled('win')
def testFindOrInstallIppet(self):
self.assertTrue(ippet_power_monitor.IppetPath())
@decorators.Enabled('win')
def testIppetRunsWithoutErrors(self):
# Very basic test, doesn't validate any output data.
platform_backend = win_platform_backend.WinPlatformBackend()
power_monitor = ippet_power_monitor.IppetPowerMonitor(platform_backend)
if not power_monitor.CanMonitorPower():
logging.warning('Test not supported on this platform.')
return
power_monitor.StartMonitoringPower(None)
statistics = power_monitor.StopMonitoringPower()
self.assertEqual(statistics['identifier'], 'ippet')
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import decorators
from telemetry.core.platform import win_platform_backend
from telemetry.core.platform.power_monitor import ippet_power_monitor
class IppetPowerMonitorTest(unittest.TestCase):
@decorators.Disabled
def testFindOrInstallIppet(self):
self.assertTrue(ippet_power_monitor.IppetPath())
@decorators.Enabled('win')
def testIppetRunsWithoutErrors(self):
# Very basic test, doesn't validate any output data.
platform_backend = win_platform_backend.WinPlatformBackend()
power_monitor = ippet_power_monitor.IppetPowerMonitor(platform_backend)
if not power_monitor.CanMonitorPower():
logging.warning('Test not supported on this platform.')
return
power_monitor.StartMonitoringPower(None)
statistics = power_monitor.StopMonitoringPower()
self.assertEqual(statistics['identifier'], 'ippet')
|
Disable IppetPowerMonitorTest.testFindOrInstallIppet which is failing on new trybots.
|
Disable IppetPowerMonitorTest.testFindOrInstallIppet which is failing on new trybots.
BUG=424027
TBR=dtu@chromium.org
Review URL: https://codereview.chromium.org/643763005
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#299833}
|
Python
|
bsd-3-clause
|
dushu1203/chromium.src,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,dednal/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,jaruba/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,Fireblend/chromium-crosswalk,dushu1203/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,M4sse/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium
-crosswalk,M4sse/chromium.src,ltilve/chromium,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,ltilve/chromium,Pluto-tv/chromium-crosswalk,dednal/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,Just-D/chromium-1,Fireblend/chromium-crosswalk,dednal/chromium.src,Just-D/chromium-1,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,Jonekee/chromium.src,ltilve/chromium,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,markYoungH/chromium.src,axinging/chromium-crosswalk,axinging/chromium-crosswalk,M4sse/chromium.src,ltilve/chromium,ltilve/chromium,M4sse/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,jaruba/chromium.src,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,Chilledheart/chromium,Just-D/chromium-1,axinging/chromium-crosswalk,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-
tv/chromium-crosswalk,Fireblend/chromium-crosswalk,M4sse/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,ltilve/chromium,dushu1203/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium,M4sse/chromium.src
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import decorators
from telemetry.core.platform import win_platform_backend
from telemetry.core.platform.power_monitor import ippet_power_monitor
class IppetPowerMonitorTest(unittest.TestCase):
@decorators.Enabled('win')
def testFindOrInstallIppet(self):
self.assertTrue(ippet_power_monitor.IppetPath())
@decorators.Enabled('win')
def testIppetRunsWithoutErrors(self):
# Very basic test, doesn't validate any output data.
platform_backend = win_platform_backend.WinPlatformBackend()
power_monitor = ippet_power_monitor.IppetPowerMonitor(platform_backend)
if not power_monitor.CanMonitorPower():
logging.warning('Test not supported on this platform.')
return
power_monitor.StartMonitoringPower(None)
statistics = power_monitor.StopMonitoringPower()
self.assertEqual(statistics['identifier'], 'ippet')
Disable IppetPowerMonitorTest.testFindOrInstallIppet which is failing on new trybots.
BUG=424027
TBR=dtu@chromium.org
Review URL: https://codereview.chromium.org/643763005
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#299833}
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import decorators
from telemetry.core.platform import win_platform_backend
from telemetry.core.platform.power_monitor import ippet_power_monitor
class IppetPowerMonitorTest(unittest.TestCase):
@decorators.Disabled
def testFindOrInstallIppet(self):
self.assertTrue(ippet_power_monitor.IppetPath())
@decorators.Enabled('win')
def testIppetRunsWithoutErrors(self):
# Very basic test, doesn't validate any output data.
platform_backend = win_platform_backend.WinPlatformBackend()
power_monitor = ippet_power_monitor.IppetPowerMonitor(platform_backend)
if not power_monitor.CanMonitorPower():
logging.warning('Test not supported on this platform.')
return
power_monitor.StartMonitoringPower(None)
statistics = power_monitor.StopMonitoringPower()
self.assertEqual(statistics['identifier'], 'ippet')
|
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import decorators
from telemetry.core.platform import win_platform_backend
from telemetry.core.platform.power_monitor import ippet_power_monitor
class IppetPowerMonitorTest(unittest.TestCase):
@decorators.Enabled('win')
def testFindOrInstallIppet(self):
self.assertTrue(ippet_power_monitor.IppetPath())
@decorators.Enabled('win')
def testIppetRunsWithoutErrors(self):
# Very basic test, doesn't validate any output data.
platform_backend = win_platform_backend.WinPlatformBackend()
power_monitor = ippet_power_monitor.IppetPowerMonitor(platform_backend)
if not power_monitor.CanMonitorPower():
logging.warning('Test not supported on this platform.')
return
power_monitor.StartMonitoringPower(None)
statistics = power_monitor.StopMonitoringPower()
self.assertEqual(statistics['identifier'], 'ippet')
<commit_msg>Disable IppetPowerMonitorTest.testFindOrInstallIppet which is failing on new trybots.
BUG=424027
TBR=dtu@chromium.org
Review URL: https://codereview.chromium.org/643763005
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#299833}<commit_after>
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import decorators
from telemetry.core.platform import win_platform_backend
from telemetry.core.platform.power_monitor import ippet_power_monitor
class IppetPowerMonitorTest(unittest.TestCase):
@decorators.Disabled
def testFindOrInstallIppet(self):
self.assertTrue(ippet_power_monitor.IppetPath())
@decorators.Enabled('win')
def testIppetRunsWithoutErrors(self):
# Very basic test, doesn't validate any output data.
platform_backend = win_platform_backend.WinPlatformBackend()
power_monitor = ippet_power_monitor.IppetPowerMonitor(platform_backend)
if not power_monitor.CanMonitorPower():
logging.warning('Test not supported on this platform.')
return
power_monitor.StartMonitoringPower(None)
statistics = power_monitor.StopMonitoringPower()
self.assertEqual(statistics['identifier'], 'ippet')
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import decorators
from telemetry.core.platform import win_platform_backend
from telemetry.core.platform.power_monitor import ippet_power_monitor
class IppetPowerMonitorTest(unittest.TestCase):
@decorators.Enabled('win')
def testFindOrInstallIppet(self):
self.assertTrue(ippet_power_monitor.IppetPath())
@decorators.Enabled('win')
def testIppetRunsWithoutErrors(self):
# Very basic test, doesn't validate any output data.
platform_backend = win_platform_backend.WinPlatformBackend()
power_monitor = ippet_power_monitor.IppetPowerMonitor(platform_backend)
if not power_monitor.CanMonitorPower():
logging.warning('Test not supported on this platform.')
return
power_monitor.StartMonitoringPower(None)
statistics = power_monitor.StopMonitoringPower()
self.assertEqual(statistics['identifier'], 'ippet')
Disable IppetPowerMonitorTest.testFindOrInstallIppet which is failing on new trybots.
BUG=424027
TBR=dtu@chromium.org
Review URL: https://codereview.chromium.org/643763005
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#299833}# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import decorators
from telemetry.core.platform import win_platform_backend
from telemetry.core.platform.power_monitor import ippet_power_monitor
class IppetPowerMonitorTest(unittest.TestCase):
@decorators.Disabled
def testFindOrInstallIppet(self):
self.assertTrue(ippet_power_monitor.IppetPath())
@decorators.Enabled('win')
def testIppetRunsWithoutErrors(self):
# Very basic test, doesn't validate any output data.
platform_backend = win_platform_backend.WinPlatformBackend()
power_monitor = ippet_power_monitor.IppetPowerMonitor(platform_backend)
if not power_monitor.CanMonitorPower():
logging.warning('Test not supported on this platform.')
return
power_monitor.StartMonitoringPower(None)
statistics = power_monitor.StopMonitoringPower()
self.assertEqual(statistics['identifier'], 'ippet')
|
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import decorators
from telemetry.core.platform import win_platform_backend
from telemetry.core.platform.power_monitor import ippet_power_monitor
class IppetPowerMonitorTest(unittest.TestCase):
@decorators.Enabled('win')
def testFindOrInstallIppet(self):
self.assertTrue(ippet_power_monitor.IppetPath())
@decorators.Enabled('win')
def testIppetRunsWithoutErrors(self):
# Very basic test, doesn't validate any output data.
platform_backend = win_platform_backend.WinPlatformBackend()
power_monitor = ippet_power_monitor.IppetPowerMonitor(platform_backend)
if not power_monitor.CanMonitorPower():
logging.warning('Test not supported on this platform.')
return
power_monitor.StartMonitoringPower(None)
statistics = power_monitor.StopMonitoringPower()
self.assertEqual(statistics['identifier'], 'ippet')
<commit_msg>Disable IppetPowerMonitorTest.testFindOrInstallIppet which is failing on new trybots.
BUG=424027
TBR=dtu@chromium.org
Review URL: https://codereview.chromium.org/643763005
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#299833}<commit_after># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import unittest
from telemetry import decorators
from telemetry.core.platform import win_platform_backend
from telemetry.core.platform.power_monitor import ippet_power_monitor
class IppetPowerMonitorTest(unittest.TestCase):
@decorators.Disabled
def testFindOrInstallIppet(self):
self.assertTrue(ippet_power_monitor.IppetPath())
@decorators.Enabled('win')
def testIppetRunsWithoutErrors(self):
# Very basic test, doesn't validate any output data.
platform_backend = win_platform_backend.WinPlatformBackend()
power_monitor = ippet_power_monitor.IppetPowerMonitor(platform_backend)
if not power_monitor.CanMonitorPower():
logging.warning('Test not supported on this platform.')
return
power_monitor.StartMonitoringPower(None)
statistics = power_monitor.StopMonitoringPower()
self.assertEqual(statistics['identifier'], 'ippet')
|
cc09da295d61965af1552b35b7ece0caf4e5a399
|
accountant/interface/forms.py
|
accountant/interface/forms.py
|
# -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from core import models
DUPLICATE_PLAYER_ERROR = \
_('There is already a player with this name in your game')
class CreateGameForm(forms.Form):
bank_cash = forms.IntegerField(required=False, initial=12000)
def clean_bank_cash(self):
data = self.cleaned_data['bank_cash']
if data == None:
data = 0
return data
class AddPlayerForm(forms.ModelForm):
class Meta:
model = models.Player
fields = ('game', 'name', 'cash')
error_messages = {
NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
}
|
# -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from core import models
DUPLICATE_PLAYER_ERROR = \
_('There is already a player with this name in your game')
class CreateGameForm(forms.Form):
bank_cash = forms.IntegerField(required=False, initial=12000)
def clean_bank_cash(self):
data = self.cleaned_data['bank_cash']
if data == None:
data = 0
return data
class AddPlayerForm(forms.ModelForm):
class Meta:
model = models.Player
fields = ('game', 'name', 'cash')
error_messages = {
NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
}
widgets = {
'game': forms.HiddenInput(),
}
|
Hide Game ID input since it is automatically set
|
Hide Game ID input since it is automatically set
|
Python
|
mit
|
XeryusTC/18xx-accountant,XeryusTC/18xx-accountant,XeryusTC/18xx-accountant,XeryusTC/18xx-accountant,XeryusTC/18xx-accountant
|
# -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from core import models
DUPLICATE_PLAYER_ERROR = \
_('There is already a player with this name in your game')
class CreateGameForm(forms.Form):
bank_cash = forms.IntegerField(required=False, initial=12000)
def clean_bank_cash(self):
data = self.cleaned_data['bank_cash']
if data == None:
data = 0
return data
class AddPlayerForm(forms.ModelForm):
class Meta:
model = models.Player
fields = ('game', 'name', 'cash')
error_messages = {
NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
}
Hide Game ID input since it is automatically set
|
# -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from core import models
DUPLICATE_PLAYER_ERROR = \
_('There is already a player with this name in your game')
class CreateGameForm(forms.Form):
bank_cash = forms.IntegerField(required=False, initial=12000)
def clean_bank_cash(self):
data = self.cleaned_data['bank_cash']
if data == None:
data = 0
return data
class AddPlayerForm(forms.ModelForm):
class Meta:
model = models.Player
fields = ('game', 'name', 'cash')
error_messages = {
NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
}
widgets = {
'game': forms.HiddenInput(),
}
|
<commit_before># -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from core import models
DUPLICATE_PLAYER_ERROR = \
_('There is already a player with this name in your game')
class CreateGameForm(forms.Form):
bank_cash = forms.IntegerField(required=False, initial=12000)
def clean_bank_cash(self):
data = self.cleaned_data['bank_cash']
if data == None:
data = 0
return data
class AddPlayerForm(forms.ModelForm):
class Meta:
model = models.Player
fields = ('game', 'name', 'cash')
error_messages = {
NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
}
<commit_msg>Hide Game ID input since it is automatically set<commit_after>
|
# -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from core import models
DUPLICATE_PLAYER_ERROR = \
_('There is already a player with this name in your game')
class CreateGameForm(forms.Form):
bank_cash = forms.IntegerField(required=False, initial=12000)
def clean_bank_cash(self):
data = self.cleaned_data['bank_cash']
if data == None:
data = 0
return data
class AddPlayerForm(forms.ModelForm):
class Meta:
model = models.Player
fields = ('game', 'name', 'cash')
error_messages = {
NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
}
widgets = {
'game': forms.HiddenInput(),
}
|
# -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from core import models
DUPLICATE_PLAYER_ERROR = \
_('There is already a player with this name in your game')
class CreateGameForm(forms.Form):
bank_cash = forms.IntegerField(required=False, initial=12000)
def clean_bank_cash(self):
data = self.cleaned_data['bank_cash']
if data == None:
data = 0
return data
class AddPlayerForm(forms.ModelForm):
class Meta:
model = models.Player
fields = ('game', 'name', 'cash')
error_messages = {
NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
}
Hide Game ID input since it is automatically set# -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from core import models
DUPLICATE_PLAYER_ERROR = \
_('There is already a player with this name in your game')
class CreateGameForm(forms.Form):
bank_cash = forms.IntegerField(required=False, initial=12000)
def clean_bank_cash(self):
data = self.cleaned_data['bank_cash']
if data == None:
data = 0
return data
class AddPlayerForm(forms.ModelForm):
class Meta:
model = models.Player
fields = ('game', 'name', 'cash')
error_messages = {
NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
}
widgets = {
'game': forms.HiddenInput(),
}
|
<commit_before># -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from core import models
DUPLICATE_PLAYER_ERROR = \
_('There is already a player with this name in your game')
class CreateGameForm(forms.Form):
bank_cash = forms.IntegerField(required=False, initial=12000)
def clean_bank_cash(self):
data = self.cleaned_data['bank_cash']
if data == None:
data = 0
return data
class AddPlayerForm(forms.ModelForm):
class Meta:
model = models.Player
fields = ('game', 'name', 'cash')
error_messages = {
NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
}
<commit_msg>Hide Game ID input since it is automatically set<commit_after># -*- coding: utf-8 -*-
from django import forms
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.translation import ugettext_lazy as _
from core import models
DUPLICATE_PLAYER_ERROR = \
_('There is already a player with this name in your game')
class CreateGameForm(forms.Form):
bank_cash = forms.IntegerField(required=False, initial=12000)
def clean_bank_cash(self):
data = self.cleaned_data['bank_cash']
if data == None:
data = 0
return data
class AddPlayerForm(forms.ModelForm):
class Meta:
model = models.Player
fields = ('game', 'name', 'cash')
error_messages = {
NON_FIELD_ERRORS: {'unique_together': DUPLICATE_PLAYER_ERROR},
}
widgets = {
'game': forms.HiddenInput(),
}
|
c9ca9ae51ebc976bc60b982b9e98f68325301aea
|
corehq/util/es/interface.py
|
corehq/util/es/interface.py
|
class ElasticsearchInterface(object):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
|
import abc
from django.conf import settings
class AbstractElasticsearchInterface(metaclass=abc.ABCMeta):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
class ElasticsearchInterface1(AbstractElasticsearchInterface):
pass
class ElasticsearchInterface2(AbstractElasticsearchInterface):
_deprecated_index_settings = (
'merge.policy.merge_factor',
)
def update_index_settings(self, index, settings_dict):
assert set(settings_dict.keys()) == {'index'}, settings_dict.keys()
settings_dict = {
"index": {
key: value for key, value in settings_dict['index'].items()
if key not in self._deprecated_index_settings
}
}
super(ElasticsearchInterface2, self).update_index_settings(index, settings_dict)
ElasticsearchInterface = {
1: ElasticsearchInterface1,
2: ElasticsearchInterface2,
}[settings.ELASTICSEARCH_MAJOR_VERSION]
|
Split ElasticsearchInterface into ElasticsearchInterface1 and ElasticsearchInterface2
|
Split ElasticsearchInterface into ElasticsearchInterface1 and ElasticsearchInterface2
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
class ElasticsearchInterface(object):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
Split ElasticsearchInterface into ElasticsearchInterface1 and ElasticsearchInterface2
|
import abc
from django.conf import settings
class AbstractElasticsearchInterface(metaclass=abc.ABCMeta):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
class ElasticsearchInterface1(AbstractElasticsearchInterface):
pass
class ElasticsearchInterface2(AbstractElasticsearchInterface):
_deprecated_index_settings = (
'merge.policy.merge_factor',
)
def update_index_settings(self, index, settings_dict):
assert set(settings_dict.keys()) == {'index'}, settings_dict.keys()
settings_dict = {
"index": {
key: value for key, value in settings_dict['index'].items()
if key not in self._deprecated_index_settings
}
}
super(ElasticsearchInterface2, self).update_index_settings(index, settings_dict)
ElasticsearchInterface = {
1: ElasticsearchInterface1,
2: ElasticsearchInterface2,
}[settings.ELASTICSEARCH_MAJOR_VERSION]
|
<commit_before>class ElasticsearchInterface(object):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
<commit_msg>Split ElasticsearchInterface into ElasticsearchInterface1 and ElasticsearchInterface2<commit_after>
|
import abc

from django.conf import settings


class AbstractElasticsearchInterface(metaclass=abc.ABCMeta):
    """Version-agnostic wrapper around a low-level Elasticsearch client."""

    def __init__(self, es):
        self.es = es

    def update_index_settings(self, index, settings_dict):
        """Apply ``settings_dict`` to ``index`` via the underlying client."""
        return self.es.indices.put_settings(settings_dict, index=index)


class ElasticsearchInterface1(AbstractElasticsearchInterface):
    """Interface for Elasticsearch 1.x; inherits the default behavior."""


class ElasticsearchInterface2(AbstractElasticsearchInterface):
    """Interface for Elasticsearch 2.x; strips settings 2.x no longer accepts."""

    _deprecated_index_settings = (
        'merge.policy.merge_factor',
    )

    def update_index_settings(self, index, settings_dict):
        assert set(settings_dict.keys()) == {'index'}, settings_dict.keys()
        # Drop deprecated keys before delegating to the base implementation.
        filtered = {
            key: value
            for key, value in settings_dict['index'].items()
            if key not in self._deprecated_index_settings
        }
        super(ElasticsearchInterface2, self).update_index_settings(
            index, {'index': filtered})


# Select the implementation matching the configured major version.
ElasticsearchInterface = {
    1: ElasticsearchInterface1,
    2: ElasticsearchInterface2,
}[settings.ELASTICSEARCH_MAJOR_VERSION]
|
class ElasticsearchInterface(object):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
Split ElasticsearchInterface into ElasticsearchInterface1 and ElasticsearchInterface2import abc
from django.conf import settings
class AbstractElasticsearchInterface(metaclass=abc.ABCMeta):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
class ElasticsearchInterface1(AbstractElasticsearchInterface):
pass
class ElasticsearchInterface2(AbstractElasticsearchInterface):
_deprecated_index_settings = (
'merge.policy.merge_factor',
)
def update_index_settings(self, index, settings_dict):
assert set(settings_dict.keys()) == {'index'}, settings_dict.keys()
settings_dict = {
"index": {
key: value for key, value in settings_dict['index'].items()
if key not in self._deprecated_index_settings
}
}
super(ElasticsearchInterface2, self).update_index_settings(index, settings_dict)
ElasticsearchInterface = {
1: ElasticsearchInterface1,
2: ElasticsearchInterface2,
}[settings.ELASTICSEARCH_MAJOR_VERSION]
|
<commit_before>class ElasticsearchInterface(object):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
<commit_msg>Split ElasticsearchInterface into ElasticsearchInterface1 and ElasticsearchInterface2<commit_after>import abc
from django.conf import settings
class AbstractElasticsearchInterface(metaclass=abc.ABCMeta):
def __init__(self, es):
self.es = es
def update_index_settings(self, index, settings_dict):
return self.es.indices.put_settings(settings_dict, index=index)
class ElasticsearchInterface1(AbstractElasticsearchInterface):
pass
class ElasticsearchInterface2(AbstractElasticsearchInterface):
_deprecated_index_settings = (
'merge.policy.merge_factor',
)
def update_index_settings(self, index, settings_dict):
assert set(settings_dict.keys()) == {'index'}, settings_dict.keys()
settings_dict = {
"index": {
key: value for key, value in settings_dict['index'].items()
if key not in self._deprecated_index_settings
}
}
super(ElasticsearchInterface2, self).update_index_settings(index, settings_dict)
ElasticsearchInterface = {
1: ElasticsearchInterface1,
2: ElasticsearchInterface2,
}[settings.ELASTICSEARCH_MAJOR_VERSION]
|
22988dad36645bb5b1f6023579757d5a0fca3a9e
|
dvox/app.py
|
dvox/app.py
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 30
}
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10
}
|
Reduce chunk lock timeout for testing
|
Reduce chunk lock timeout for testing
|
Python
|
mit
|
numberoverzero/dvox
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 30
}
Reduce chunk lock timeout for testing
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10
}
|
<commit_before>config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 30
}
<commit_msg>Reduce chunk lock timeout for testing<commit_after>
|
# Application configuration constants.
config = {
"CREATE_RETRIES": 5,  # retry budget for create operations (per the key name)
"CHUNK_LOCK_TIMEOUT_SECONDS": 10  # presumably the chunk-lock TTL — TODO confirm against lock code
}
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 30
}
Reduce chunk lock timeout for testingconfig = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10
}
|
<commit_before>config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 30
}
<commit_msg>Reduce chunk lock timeout for testing<commit_after>config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10
}
|
3380091215ef3918449f1a49210cb23de39603ea
|
docs/conf.py
|
docs/conf.py
|
project = 'django-bcrypt'
version = ''
release = ''
copyright = '2010, 2011 UUMC Ltd.'
html_logo = 'playfire.png'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, u'Playfire', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
project = 'django-bcrypt'
version = ''
release = ''
copyright = '2010, 2011 UUMC Ltd.'
html_logo = 'playfire.png'
html_theme = 'nature'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, u'Playfire', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
Use the nature theme as it's nicer on our logo
|
Use the nature theme as it's nicer on our logo
|
Python
|
bsd-3-clause
|
playfire/django-bcrypt
|
project = 'django-bcrypt'
version = ''
release = ''
copyright = '2010, 2011 UUMC Ltd.'
html_logo = 'playfire.png'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, u'Playfire', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
Use the nature theme as it's nicer on our logo
|
project = 'django-bcrypt'
version = ''
release = ''
copyright = '2010, 2011 UUMC Ltd.'
html_logo = 'playfire.png'
html_theme = 'nature'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, u'Playfire', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
<commit_before>project = 'django-bcrypt'
version = ''
release = ''
copyright = '2010, 2011 UUMC Ltd.'
html_logo = 'playfire.png'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, u'Playfire', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
<commit_msg>Use the nature theme as it's nicer on our logo<commit_after>
|
project = 'django-bcrypt'
version = ''
release = ''
copyright = '2010, 2011 UUMC Ltd.'
html_logo = 'playfire.png'
html_theme = 'nature'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, u'Playfire', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
project = 'django-bcrypt'
version = ''
release = ''
copyright = '2010, 2011 UUMC Ltd.'
html_logo = 'playfire.png'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, u'Playfire', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
Use the nature theme as it's nicer on our logoproject = 'django-bcrypt'
version = ''
release = ''
copyright = '2010, 2011 UUMC Ltd.'
html_logo = 'playfire.png'
html_theme = 'nature'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, u'Playfire', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
<commit_before>project = 'django-bcrypt'
version = ''
release = ''
copyright = '2010, 2011 UUMC Ltd.'
html_logo = 'playfire.png'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, u'Playfire', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
<commit_msg>Use the nature theme as it's nicer on our logo<commit_after>project = 'django-bcrypt'
version = ''
release = ''
copyright = '2010, 2011 UUMC Ltd.'
html_logo = 'playfire.png'
html_theme = 'nature'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
html_title = "%s documentation" % project
master_doc = 'index'
exclude_trees = ['_build']
templates_path = ['_templates']
latex_documents = [
('index', '%s.tex' % project, html_title, u'Playfire', 'manual'),
]
intersphinx_mapping = {'http://docs.python.org/': None}
|
a97f3948c0f9c2b7e446c70a2f158b38a6c9365b
|
modules/play/play.py
|
modules/play/play.py
|
from flask import Blueprint, request, url_for, render_template
from flask.ext.security import current_user
play = Blueprint('play', __name__, template_folder='templates')
@play.route('/')
def index():
api_token = current_user.api_token
return render_template('play/index.html', api_token=api_token)
|
from flask import Blueprint, request, url_for, render_template
from flask import redirect
from flask.ext.security import current_user, AnonymousUser
play = Blueprint('play', __name__, template_folder='templates')
@play.route('/')
def index():
if hasattr(current_user, 'api_token'):
api_token = current_user.api_token
return render_template('play/index.html', api_token=api_token)
else:
return redirect('/account/login')
|
Fix error if anonymous user is logged in, instead redirect to login page
|
Fix error if anonymous user is logged in, instead redirect to login page
|
Python
|
mit
|
KanColleTool/kcsrv,KanColleTool/kcsrv,KanColleTool/kcsrv
|
from flask import Blueprint, request, url_for, render_template
from flask.ext.security import current_user
play = Blueprint('play', __name__, template_folder='templates')
@play.route('/')
def index():
api_token = current_user.api_token
return render_template('play/index.html', api_token=api_token)
Fix error if anonymous user is logged in, instead redirect to login page
|
from flask import Blueprint, request, url_for, render_template
from flask import redirect
from flask.ext.security import current_user, AnonymousUser
play = Blueprint('play', __name__, template_folder='templates')
@play.route('/')
def index():
if hasattr(current_user, 'api_token'):
api_token = current_user.api_token
return render_template('play/index.html', api_token=api_token)
else:
return redirect('/account/login')
|
<commit_before>from flask import Blueprint, request, url_for, render_template
from flask.ext.security import current_user
play = Blueprint('play', __name__, template_folder='templates')
@play.route('/')
def index():
api_token = current_user.api_token
return render_template('play/index.html', api_token=api_token)
<commit_msg>Fix error if anonymous user is logged in, instead redirect to login page<commit_after>
|
from flask import Blueprint, request, url_for, render_template
from flask import redirect
from flask.ext.security import current_user, AnonymousUser

play = Blueprint('play', __name__, template_folder='templates')


@play.route('/')
def index():
    """Render the play page for logged-in users; send others to login."""
    # Anonymous users lack an api_token attribute, so use that as the check.
    if not hasattr(current_user, 'api_token'):
        return redirect('/account/login')
    return render_template('play/index.html', api_token=current_user.api_token)
|
from flask import Blueprint, request, url_for, render_template
from flask.ext.security import current_user
play = Blueprint('play', __name__, template_folder='templates')
@play.route('/')
def index():
api_token = current_user.api_token
return render_template('play/index.html', api_token=api_token)
Fix error if anonymous user is logged in, instead redirect to login pagefrom flask import Blueprint, request, url_for, render_template
from flask import redirect
from flask.ext.security import current_user, AnonymousUser
play = Blueprint('play', __name__, template_folder='templates')
@play.route('/')
def index():
if hasattr(current_user, 'api_token'):
api_token = current_user.api_token
return render_template('play/index.html', api_token=api_token)
else:
return redirect('/account/login')
|
<commit_before>from flask import Blueprint, request, url_for, render_template
from flask.ext.security import current_user
play = Blueprint('play', __name__, template_folder='templates')
@play.route('/')
def index():
api_token = current_user.api_token
return render_template('play/index.html', api_token=api_token)
<commit_msg>Fix error if anonymous user is logged in, instead redirect to login page<commit_after>from flask import Blueprint, request, url_for, render_template
from flask import redirect
from flask.ext.security import current_user, AnonymousUser
play = Blueprint('play', __name__, template_folder='templates')
@play.route('/')
def index():
if hasattr(current_user, 'api_token'):
api_token = current_user.api_token
return render_template('play/index.html', api_token=api_token)
else:
return redirect('/account/login')
|
764a6387ddd117c5dc55b5345fcf7ebab5a01190
|
addons/zotero/serializer.py
|
addons/zotero/serializer.py
|
from addons.base.serializer import CitationsAddonSerializer
class ZoteroSerializer(CitationsAddonSerializer):
addon_short_name = 'zotero'
@property
def serialized_node_settings(self):
result = super(ZoteroSerializer, self).serialized_node_settings
result['library'] = {
'name': self.node_settings.fetch_library_name
}
return result
|
from addons.base.serializer import CitationsAddonSerializer
class ZoteroSerializer(CitationsAddonSerializer):
addon_short_name = 'zotero'
@property
def serialized_node_settings(self):
result = super(ZoteroSerializer, self).serialized_node_settings
result['library'] = {
'name': self.node_settings.fetch_library_name
}
result['groups'] = self.node_settings.fetch_groups
return result
|
Add groups key in response to zotero/settings.
|
Add groups key in response to zotero/settings.
|
Python
|
apache-2.0
|
brianjgeiger/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,mfraezz/osf.io,chennan47/osf.io,cslzchen/osf.io,adlius/osf.io,aaxelb/osf.io,caseyrollins/osf.io,erinspace/osf.io,aaxelb/osf.io,mattclark/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,binoculars/osf.io,chennan47/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,mfraezz/osf.io,saradbowman/osf.io,sloria/osf.io,adlius/osf.io,adlius/osf.io,caseyrollins/osf.io,icereval/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,mattclark/osf.io,erinspace/osf.io,sloria/osf.io,felliott/osf.io,Johnetordoff/osf.io,icereval/osf.io,adlius/osf.io,pattisdr/osf.io,binoculars/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,felliott/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,brianjgeiger/osf.io,chennan47/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,binoculars/osf.io,felliott/osf.io,cslzchen/osf.io,pattisdr/osf.io,baylee-d/osf.io,sloria/osf.io,mfraezz/osf.io,aaxelb/osf.io,cslzchen/osf.io
|
from addons.base.serializer import CitationsAddonSerializer
class ZoteroSerializer(CitationsAddonSerializer):
addon_short_name = 'zotero'
@property
def serialized_node_settings(self):
result = super(ZoteroSerializer, self).serialized_node_settings
result['library'] = {
'name': self.node_settings.fetch_library_name
}
return result
Add groups key in response to zotero/settings.
|
from addons.base.serializer import CitationsAddonSerializer
class ZoteroSerializer(CitationsAddonSerializer):
addon_short_name = 'zotero'
@property
def serialized_node_settings(self):
result = super(ZoteroSerializer, self).serialized_node_settings
result['library'] = {
'name': self.node_settings.fetch_library_name
}
result['groups'] = self.node_settings.fetch_groups
return result
|
<commit_before>from addons.base.serializer import CitationsAddonSerializer
class ZoteroSerializer(CitationsAddonSerializer):
addon_short_name = 'zotero'
@property
def serialized_node_settings(self):
result = super(ZoteroSerializer, self).serialized_node_settings
result['library'] = {
'name': self.node_settings.fetch_library_name
}
return result
<commit_msg>Add groups key in response to zotero/settings.<commit_after>
|
from addons.base.serializer import CitationsAddonSerializer


class ZoteroSerializer(CitationsAddonSerializer):
    """Serializer for the Zotero citations addon."""
    addon_short_name = 'zotero'

    @property
    def serialized_node_settings(self):
        """Extend the base payload with the library name and group list."""
        serialized = super(ZoteroSerializer, self).serialized_node_settings
        serialized['library'] = {
            'name': self.node_settings.fetch_library_name,
        }
        serialized['groups'] = self.node_settings.fetch_groups
        return serialized
|
from addons.base.serializer import CitationsAddonSerializer
class ZoteroSerializer(CitationsAddonSerializer):
addon_short_name = 'zotero'
@property
def serialized_node_settings(self):
result = super(ZoteroSerializer, self).serialized_node_settings
result['library'] = {
'name': self.node_settings.fetch_library_name
}
return result
Add groups key in response to zotero/settings.from addons.base.serializer import CitationsAddonSerializer
class ZoteroSerializer(CitationsAddonSerializer):
addon_short_name = 'zotero'
@property
def serialized_node_settings(self):
result = super(ZoteroSerializer, self).serialized_node_settings
result['library'] = {
'name': self.node_settings.fetch_library_name
}
result['groups'] = self.node_settings.fetch_groups
return result
|
<commit_before>from addons.base.serializer import CitationsAddonSerializer
class ZoteroSerializer(CitationsAddonSerializer):
addon_short_name = 'zotero'
@property
def serialized_node_settings(self):
result = super(ZoteroSerializer, self).serialized_node_settings
result['library'] = {
'name': self.node_settings.fetch_library_name
}
return result
<commit_msg>Add groups key in response to zotero/settings.<commit_after>from addons.base.serializer import CitationsAddonSerializer
class ZoteroSerializer(CitationsAddonSerializer):
addon_short_name = 'zotero'
@property
def serialized_node_settings(self):
result = super(ZoteroSerializer, self).serialized_node_settings
result['library'] = {
'name': self.node_settings.fetch_library_name
}
result['groups'] = self.node_settings.fetch_groups
return result
|
da619869c8d321863a1cc081189ebda79e1b5dbc
|
djclick/test/test_params.py
|
djclick/test/test_params.py
|
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
# Use `.endswith()` because of differences between CPython and pypy
assert str(e).endswith(
'BadParameter: could not find testapp.DummyModel with {}'.format(
error_message))
|
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
assert e.match(
'could not find testapp.DummyModel with {}'.format(error_message))
|
Fix a check for specific formatting of an error message
|
tests: Fix a check for specific formatting of an error message
Instead of checking for the specific formatting of pytest's wrapper
around an exception, check the error message with `ExceptionInfo.match`.
This improves compatibility with different versions of pytest.
|
Python
|
mit
|
GaretJax/django-click
|
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
# Use `.endswith()` because of differences between CPython and pypy
assert str(e).endswith(
'BadParameter: could not find testapp.DummyModel with {}'.format(
error_message))
tests: Fix a check for specific formatting of an error message
Instead of checking for the specific formatting of pytest's wrapper
around an exception, check the error message with `ExceptionInfo.match`.
This improves compatibility with different versions of pytest.
|
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
assert e.match(
'could not find testapp.DummyModel with {}'.format(error_message))
|
<commit_before>from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
# Use `.endswith()` because of differences between CPython and pypy
assert str(e).endswith(
'BadParameter: could not find testapp.DummyModel with {}'.format(
error_message))
<commit_msg>tests: Fix a check for specific formatting of an error message
Instead of checking for the specific formatting of pytest's wrapper
around an exception, check the error message with `ExceptionInfo.match`.
This improves compatibility with different versions of pytest.<commit_after>
|
from click.exceptions import BadParameter
import pytest

from djclick import params


@pytest.mark.django_db
def test_modelinstance_init():
    """A ModelInstance param accepts either a model class or a queryset."""
    from testapp.models import DummyModel
    from django.db.models.query import QuerySet

    # A model class gets wrapped in a queryset...
    param = params.ModelInstance(DummyModel)
    assert isinstance(param.qs, QuerySet)

    # ...while an explicit queryset is kept as-is.
    qs = DummyModel.objects.all()
    param = params.ModelInstance(qs)
    assert param.qs is qs


@pytest.mark.django_db
@pytest.mark.parametrize(
    ('arg', 'value'),
    (
        ('--pk', '1'),
        ('--slug', 'test'),
        ('--endswith', 'st'),
    )
)
def test_convert_ok(call_command, arg, value):
    """Each supported lookup flag resolves to the matching instance."""
    from testapp.models import DummyModel

    DummyModel.objects.create(pk=1, slug='test')
    assert call_command('modelcmd', arg, value).stdout == b'<DummyModel: 1>'


@pytest.mark.django_db
@pytest.mark.parametrize(
    ('args', 'error_message'),
    (
        (('--pk', '99'), "pk=99"),
        (('--slug', 'doesnotexist'), "slug=doesnotexist"),
    )
)
def test_convert_fail(call_command, args, error_message):
    """A lookup that matches nothing raises BadParameter naming the filter."""
    with pytest.raises(BadParameter) as e:
        call_command('modelcmd', *args)
    assert e.match(
        'could not find testapp.DummyModel with {}'.format(error_message))
|
from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
# Use `.endswith()` because of differences between CPython and pypy
assert str(e).endswith(
'BadParameter: could not find testapp.DummyModel with {}'.format(
error_message))
tests: Fix a check for specific formatting of an error message
Instead of checking for the specific formatting of pytest's wrapper
around an exception, check the error message with `ExceptionInfo.match`.
This improves compatibility with different versions of pytest.from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
assert e.match(
'could not find testapp.DummyModel with {}'.format(error_message))
|
<commit_before>from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
# Use `.endswith()` because of differences between CPython and pypy
assert str(e).endswith(
'BadParameter: could not find testapp.DummyModel with {}'.format(
error_message))
<commit_msg>tests: Fix a check for specific formatting of an error message
Instead of checking for the specific formatting of pytest's wrapper
around an exception, check the error message with `ExceptionInfo.match`.
This improves compatibility with different versions of pytest.<commit_after>from click.exceptions import BadParameter
import pytest
from djclick import params
@pytest.mark.django_db
def test_modelinstance_init():
from testapp.models import DummyModel
from django.db.models.query import QuerySet
param = params.ModelInstance(DummyModel)
assert isinstance(param.qs, QuerySet)
qs = DummyModel.objects.all()
param = params.ModelInstance(qs)
assert param.qs is qs
@pytest.mark.django_db
@pytest.mark.parametrize(
('arg', 'value'),
(
('--pk', '1'),
('--slug', 'test'),
('--endswith', 'st'),
)
)
def test_convert_ok(call_command, arg, value):
from testapp.models import DummyModel
DummyModel.objects.create(pk=1, slug='test')
expected = b'<DummyModel: 1>'
assert call_command('modelcmd', arg, value).stdout == expected
@pytest.mark.django_db
@pytest.mark.parametrize(
('args', 'error_message'),
(
(('--pk', '99'), "pk=99"),
(('--slug', 'doesnotexist'), "slug=doesnotexist"),
)
)
def test_convert_fail(call_command, args, error_message):
with pytest.raises(BadParameter) as e:
call_command('modelcmd', *args)
assert e.match(
'could not find testapp.DummyModel with {}'.format(error_message))
|
6c870e242914d40601bf7ad24e48af2b0d28559e
|
notification/urls.py
|
notification/urls.py
|
from django.conf.urls.defaults import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
try:
from django.conf.urls.defaults import *
except ImportError:
from django.conf.urls import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
Change to work with django 1.7
|
Change to work with django 1.7
|
Python
|
mit
|
daniell/django-notification,daniell/django-notification
|
from django.conf.urls.defaults import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
Change to work with django 1.7
|
try:
from django.conf.urls.defaults import *
except ImportError:
from django.conf.urls import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
<commit_before>from django.conf.urls.defaults import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
<commit_msg>Change to work with django 1.7<commit_after>
|
try:
from django.conf.urls.defaults import *
except ImportError:
from django.conf.urls import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
from django.conf.urls.defaults import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
Change to work with django 1.7try:
from django.conf.urls.defaults import *
except ImportError:
from django.conf.urls import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
<commit_before>from django.conf.urls.defaults import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
<commit_msg>Change to work with django 1.7<commit_after>try:
from django.conf.urls.defaults import *
except ImportError:
from django.conf.urls import *
from notification.views import notices, mark_all_seen, feed_for_user, single, notice_settings
urlpatterns = patterns("",
url(r"^$", notices, name="notification_notices"),
url(r"^settings/$", notice_settings, name="notification_notice_settings"),
url(r"^(\d+)/$", single, name="notification_notice"),
url(r"^feed/$", feed_for_user, name="notification_feed_for_user"),
url(r"^mark_all_seen/$", mark_all_seen, name="notification_mark_all_seen"),
)
|
07ae4af01043887d584e3024d7d7e0aa093c85f4
|
intercom.py
|
intercom.py
|
import configparser
import time
import RPi.GPIO as GPIO
from client import MumbleClient
class InterCom:
def __init__(self):
config = configparser.ConfigParser()
config.read('intercom.ini')
self.mumble_client = MumbleClient(config['mumbleclient'])
self.exit = False
self.send_input = False
if config['general']['gpiotype'] == 'BCM':
GPIO.setmode(GPIO.BCM)
self.button = int(config['general']['button'])
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def run(self):
while not self.exit:
if GPIO.input(self.button):
self.mumble_client.send_input_audio()
if __name__ == '__main__':
try:
InterCom().run()
except Exception as e:
raise e
finally:
GPIO.cleanup()
|
import configparser
import time
import RPi.GPIO as GPIO
from client import MumbleClient
class InterCom:
def __init__(self):
config = configparser.ConfigParser()
config.read('intercom.ini')
self.mumble_client = MumbleClient(config['mumbleclient'])
self.exit = False
if config['general']['gpiotype'] == 'BCM':
GPIO.setmode(GPIO.BCM)
self.button = int(config['general']['button'])
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def run(self):
while not self.exit:
if GPIO.input(self.button):
self.mumble_client.send_input_audio()
else:
self.mumble_client.clear_input()
if __name__ == '__main__':
try:
InterCom().run()
except Exception as e:
raise e
finally:
GPIO.cleanup()
|
Add clearing if not recording and remove useless member
|
Add clearing if not recording and remove useless member
|
Python
|
mit
|
pkronstrom/intercom
|
import configparser
import time
import RPi.GPIO as GPIO
from client import MumbleClient
class InterCom:
def __init__(self):
config = configparser.ConfigParser()
config.read('intercom.ini')
self.mumble_client = MumbleClient(config['mumbleclient'])
self.exit = False
self.send_input = False
if config['general']['gpiotype'] == 'BCM':
GPIO.setmode(GPIO.BCM)
self.button = int(config['general']['button'])
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def run(self):
while not self.exit:
if GPIO.input(self.button):
self.mumble_client.send_input_audio()
if __name__ == '__main__':
try:
InterCom().run()
except Exception as e:
raise e
finally:
GPIO.cleanup()
Add clearing if not recording and remove useless member
|
import configparser
import time
import RPi.GPIO as GPIO
from client import MumbleClient
class InterCom:
def __init__(self):
config = configparser.ConfigParser()
config.read('intercom.ini')
self.mumble_client = MumbleClient(config['mumbleclient'])
self.exit = False
if config['general']['gpiotype'] == 'BCM':
GPIO.setmode(GPIO.BCM)
self.button = int(config['general']['button'])
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def run(self):
while not self.exit:
if GPIO.input(self.button):
self.mumble_client.send_input_audio()
else:
self.mumble_client.clear_input()
if __name__ == '__main__':
try:
InterCom().run()
except Exception as e:
raise e
finally:
GPIO.cleanup()
|
<commit_before>import configparser
import time
import RPi.GPIO as GPIO
from client import MumbleClient
class InterCom:
def __init__(self):
config = configparser.ConfigParser()
config.read('intercom.ini')
self.mumble_client = MumbleClient(config['mumbleclient'])
self.exit = False
self.send_input = False
if config['general']['gpiotype'] == 'BCM':
GPIO.setmode(GPIO.BCM)
self.button = int(config['general']['button'])
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def run(self):
while not self.exit:
if GPIO.input(self.button):
self.mumble_client.send_input_audio()
if __name__ == '__main__':
try:
InterCom().run()
except Exception as e:
raise e
finally:
GPIO.cleanup()
<commit_msg>Add clearing if not recording and remove useless member<commit_after>
|
import configparser
import time
import RPi.GPIO as GPIO
from client import MumbleClient
class InterCom:
def __init__(self):
config = configparser.ConfigParser()
config.read('intercom.ini')
self.mumble_client = MumbleClient(config['mumbleclient'])
self.exit = False
if config['general']['gpiotype'] == 'BCM':
GPIO.setmode(GPIO.BCM)
self.button = int(config['general']['button'])
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def run(self):
while not self.exit:
if GPIO.input(self.button):
self.mumble_client.send_input_audio()
else:
self.mumble_client.clear_input()
if __name__ == '__main__':
try:
InterCom().run()
except Exception as e:
raise e
finally:
GPIO.cleanup()
|
import configparser
import time
import RPi.GPIO as GPIO
from client import MumbleClient
class InterCom:
def __init__(self):
config = configparser.ConfigParser()
config.read('intercom.ini')
self.mumble_client = MumbleClient(config['mumbleclient'])
self.exit = False
self.send_input = False
if config['general']['gpiotype'] == 'BCM':
GPIO.setmode(GPIO.BCM)
self.button = int(config['general']['button'])
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def run(self):
while not self.exit:
if GPIO.input(self.button):
self.mumble_client.send_input_audio()
if __name__ == '__main__':
try:
InterCom().run()
except Exception as e:
raise e
finally:
GPIO.cleanup()
Add clearing if not recording and remove useless memberimport configparser
import time
import RPi.GPIO as GPIO
from client import MumbleClient
class InterCom:
def __init__(self):
config = configparser.ConfigParser()
config.read('intercom.ini')
self.mumble_client = MumbleClient(config['mumbleclient'])
self.exit = False
if config['general']['gpiotype'] == 'BCM':
GPIO.setmode(GPIO.BCM)
self.button = int(config['general']['button'])
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def run(self):
while not self.exit:
if GPIO.input(self.button):
self.mumble_client.send_input_audio()
else:
self.mumble_client.clear_input()
if __name__ == '__main__':
try:
InterCom().run()
except Exception as e:
raise e
finally:
GPIO.cleanup()
|
<commit_before>import configparser
import time
import RPi.GPIO as GPIO
from client import MumbleClient
class InterCom:
def __init__(self):
config = configparser.ConfigParser()
config.read('intercom.ini')
self.mumble_client = MumbleClient(config['mumbleclient'])
self.exit = False
self.send_input = False
if config['general']['gpiotype'] == 'BCM':
GPIO.setmode(GPIO.BCM)
self.button = int(config['general']['button'])
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def run(self):
while not self.exit:
if GPIO.input(self.button):
self.mumble_client.send_input_audio()
if __name__ == '__main__':
try:
InterCom().run()
except Exception as e:
raise e
finally:
GPIO.cleanup()
<commit_msg>Add clearing if not recording and remove useless member<commit_after>import configparser
import time
import RPi.GPIO as GPIO
from client import MumbleClient
class InterCom:
def __init__(self):
config = configparser.ConfigParser()
config.read('intercom.ini')
self.mumble_client = MumbleClient(config['mumbleclient'])
self.exit = False
if config['general']['gpiotype'] == 'BCM':
GPIO.setmode(GPIO.BCM)
self.button = int(config['general']['button'])
GPIO.setup(self.button, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
def run(self):
while not self.exit:
if GPIO.input(self.button):
self.mumble_client.send_input_audio()
else:
self.mumble_client.clear_input()
if __name__ == '__main__':
try:
InterCom().run()
except Exception as e:
raise e
finally:
GPIO.cleanup()
|
094380f4e30608713de549389adc1657f55b97b6
|
UCP/login/serializers.py
|
UCP/login/serializers.py
|
from rest_framework import serializers
from django.contrib.auth.models import User
from login.models import UserProfile
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
write_only_fields = ('password',)
read_only_fields = ('id',)
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('id', 'first_name', 'last_name', 'designation', 'profile_image')
read_only_fields = ('id',)
|
from rest_framework import serializers
from django.contrib.auth.models import User
from login.models import UserProfile
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
write_only_fields = ('password',)
read_only_fields = ('id',)
def create(self, validated_data):
user = User(email=validated_data['email'], username=validated_data['username'])
user.set_password(validated_data['password'])
user.save()
return user
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('id', 'first_name', 'last_name', 'designation', 'profile_image')
read_only_fields = ('id',)
|
Fix saving passwords in Register API
|
Fix saving passwords in Register API
override the create method in UserSerializer and set password there
|
Python
|
bsd-3-clause
|
BuildmLearn/University-Campus-Portal-UCP,BuildmLearn/University-Campus-Portal-UCP,BuildmLearn/University-Campus-Portal-UCP
|
from rest_framework import serializers
from django.contrib.auth.models import User
from login.models import UserProfile
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
write_only_fields = ('password',)
read_only_fields = ('id',)
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('id', 'first_name', 'last_name', 'designation', 'profile_image')
read_only_fields = ('id',)
Fix saving passwords in Register API
override the create method in UserSerializer and set password there
|
from rest_framework import serializers
from django.contrib.auth.models import User
from login.models import UserProfile
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
write_only_fields = ('password',)
read_only_fields = ('id',)
def create(self, validated_data):
user = User(email=validated_data['email'], username=validated_data['username'])
user.set_password(validated_data['password'])
user.save()
return user
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('id', 'first_name', 'last_name', 'designation', 'profile_image')
read_only_fields = ('id',)
|
<commit_before>from rest_framework import serializers
from django.contrib.auth.models import User
from login.models import UserProfile
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
write_only_fields = ('password',)
read_only_fields = ('id',)
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('id', 'first_name', 'last_name', 'designation', 'profile_image')
read_only_fields = ('id',)
<commit_msg>Fix saving passwords in Register API
override the create method in UserSerializer and set password there<commit_after>
|
from rest_framework import serializers
from django.contrib.auth.models import User
from login.models import UserProfile
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
write_only_fields = ('password',)
read_only_fields = ('id',)
def create(self, validated_data):
user = User(email=validated_data['email'], username=validated_data['username'])
user.set_password(validated_data['password'])
user.save()
return user
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('id', 'first_name', 'last_name', 'designation', 'profile_image')
read_only_fields = ('id',)
|
from rest_framework import serializers
from django.contrib.auth.models import User
from login.models import UserProfile
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
write_only_fields = ('password',)
read_only_fields = ('id',)
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('id', 'first_name', 'last_name', 'designation', 'profile_image')
read_only_fields = ('id',)
Fix saving passwords in Register API
override the create method in UserSerializer and set password therefrom rest_framework import serializers
from django.contrib.auth.models import User
from login.models import UserProfile
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
write_only_fields = ('password',)
read_only_fields = ('id',)
def create(self, validated_data):
user = User(email=validated_data['email'], username=validated_data['username'])
user.set_password(validated_data['password'])
user.save()
return user
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('id', 'first_name', 'last_name', 'designation', 'profile_image')
read_only_fields = ('id',)
|
<commit_before>from rest_framework import serializers
from django.contrib.auth.models import User
from login.models import UserProfile
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
write_only_fields = ('password',)
read_only_fields = ('id',)
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('id', 'first_name', 'last_name', 'designation', 'profile_image')
read_only_fields = ('id',)
<commit_msg>Fix saving passwords in Register API
override the create method in UserSerializer and set password there<commit_after>from rest_framework import serializers
from django.contrib.auth.models import User
from login.models import UserProfile
class UserSerializer(serializers.ModelSerializer):
class Meta:
model = User
fields = ('id', 'username', 'email', 'password')
write_only_fields = ('password',)
read_only_fields = ('id',)
def create(self, validated_data):
user = User(email=validated_data['email'], username=validated_data['username'])
user.set_password(validated_data['password'])
user.save()
return user
class UserProfileSerializer(serializers.ModelSerializer):
class Meta:
model = UserProfile
fields = ('id', 'first_name', 'last_name', 'designation', 'profile_image')
read_only_fields = ('id',)
|
8db1207cc8564fff8fb739b627932ea3ce4785fc
|
app/gbi_server/forms/wfs.py
|
app/gbi_server/forms/wfs.py
|
# This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from flask.ext.wtf import SelectField, HiddenField, TextField, validators
from flask.ext.babel import lazy_gettext as _l
from .base import Form
class WFSEditForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('edit_form')
layer = SelectField(_l('wfs_layer'))
external_editor = HiddenField()
edit_form = HiddenField()
class WFSAddLayerForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('add_form')
new_layer = TextField(_l('wfs_new_layer_name'), validators=[
validators.Regexp(regex='^[\w\- ]+$', message=_l('Only alphanummeric lowercase characters are allowed!')),
])
add_form = HiddenField()
|
# This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from flask.ext.wtf import SelectField, HiddenField, TextField, validators
from flask.ext.babel import lazy_gettext as _l
from .base import Form
class WFSEditForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('edit_form')
layer = SelectField(_l('wfs_layer'))
external_editor = HiddenField()
edit_form = HiddenField()
class WFSAddLayerForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('add_form')
new_layer = TextField(_l('wfs_new_layer_name'), validators=[validators.Required(),])
add_form = HiddenField()
|
Allow all characters for layer title
|
Allow all characters for layer title
|
Python
|
apache-2.0
|
omniscale/gbi-server,omniscale/gbi-server,omniscale/gbi-server
|
# This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from flask.ext.wtf import SelectField, HiddenField, TextField, validators
from flask.ext.babel import lazy_gettext as _l
from .base import Form
class WFSEditForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('edit_form')
layer = SelectField(_l('wfs_layer'))
external_editor = HiddenField()
edit_form = HiddenField()
class WFSAddLayerForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('add_form')
new_layer = TextField(_l('wfs_new_layer_name'), validators=[
validators.Regexp(regex='^[\w\- ]+$', message=_l('Only alphanummeric lowercase characters are allowed!')),
])
add_form = HiddenField()Allow all characters for layer title
|
# This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from flask.ext.wtf import SelectField, HiddenField, TextField, validators
from flask.ext.babel import lazy_gettext as _l
from .base import Form
class WFSEditForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('edit_form')
layer = SelectField(_l('wfs_layer'))
external_editor = HiddenField()
edit_form = HiddenField()
class WFSAddLayerForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('add_form')
new_layer = TextField(_l('wfs_new_layer_name'), validators=[validators.Required(),])
add_form = HiddenField()
|
<commit_before># This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from flask.ext.wtf import SelectField, HiddenField, TextField, validators
from flask.ext.babel import lazy_gettext as _l
from .base import Form
class WFSEditForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('edit_form')
layer = SelectField(_l('wfs_layer'))
external_editor = HiddenField()
edit_form = HiddenField()
class WFSAddLayerForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('add_form')
new_layer = TextField(_l('wfs_new_layer_name'), validators=[
validators.Regexp(regex='^[\w\- ]+$', message=_l('Only alphanummeric lowercase characters are allowed!')),
])
add_form = HiddenField()<commit_msg>Allow all characters for layer title<commit_after>
|
# This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from flask.ext.wtf import SelectField, HiddenField, TextField, validators
from flask.ext.babel import lazy_gettext as _l
from .base import Form
class WFSEditForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('edit_form')
layer = SelectField(_l('wfs_layer'))
external_editor = HiddenField()
edit_form = HiddenField()
class WFSAddLayerForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('add_form')
new_layer = TextField(_l('wfs_new_layer_name'), validators=[validators.Required(),])
add_form = HiddenField()
|
# This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from flask.ext.wtf import SelectField, HiddenField, TextField, validators
from flask.ext.babel import lazy_gettext as _l
from .base import Form
class WFSEditForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('edit_form')
layer = SelectField(_l('wfs_layer'))
external_editor = HiddenField()
edit_form = HiddenField()
class WFSAddLayerForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('add_form')
new_layer = TextField(_l('wfs_new_layer_name'), validators=[
validators.Regexp(regex='^[\w\- ]+$', message=_l('Only alphanummeric lowercase characters are allowed!')),
])
add_form = HiddenField()Allow all characters for layer title# This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from flask.ext.wtf import SelectField, HiddenField, TextField, validators
from flask.ext.babel import lazy_gettext as _l
from .base import Form
class WFSEditForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('edit_form')
layer = SelectField(_l('wfs_layer'))
external_editor = HiddenField()
edit_form = HiddenField()
class WFSAddLayerForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('add_form')
new_layer = TextField(_l('wfs_new_layer_name'), validators=[validators.Required(),])
add_form = HiddenField()
|
<commit_before># This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from flask.ext.wtf import SelectField, HiddenField, TextField, validators
from flask.ext.babel import lazy_gettext as _l
from .base import Form
class WFSEditForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('edit_form')
layer = SelectField(_l('wfs_layer'))
external_editor = HiddenField()
edit_form = HiddenField()
class WFSAddLayerForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('add_form')
new_layer = TextField(_l('wfs_new_layer_name'), validators=[
validators.Regexp(regex='^[\w\- ]+$', message=_l('Only alphanummeric lowercase characters are allowed!')),
])
add_form = HiddenField()<commit_msg>Allow all characters for layer title<commit_after># This file is part of the GBI project.
# Copyright (C) 2013 Omniscale GmbH & Co. KG <http://omniscale.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import request
from flask.ext.wtf import SelectField, HiddenField, TextField, validators
from flask.ext.babel import lazy_gettext as _l
from .base import Form
class WFSEditForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('edit_form')
layer = SelectField(_l('wfs_layer'))
external_editor = HiddenField()
edit_form = HiddenField()
class WFSAddLayerForm(Form):
def is_submitted(self):
return request and request.method in ("PUT", "POST") and request.form.get('add_form')
new_layer = TextField(_l('wfs_new_layer_name'), validators=[validators.Required(),])
add_form = HiddenField()
|
382d304a3f8a4a1d2a396074836ed6e951245800
|
cropList.py
|
cropList.py
|
#!/usr/bin/python
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('imageNames = [\n')
js_file.write(',\n'.join(['\t"%s"' % name for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n\t];\n')
js_file.close()
|
#!/usr/bin/python
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('var imageNames = [\n')
js_file.write(',\n'.join(['\t"{}"'.format(name) for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n];\n')
js_file.close()
|
Declare imageNames with var, use .format() instead of %, remove tab before closing bracket
|
Declare imageNames with var, use .format() instead of %, remove tab before closing bracket
|
Python
|
mit
|
nightjuggler/pig,nightjuggler/pig,nightjuggler/pig
|
#!/usr/bin/python
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('imageNames = [\n')
js_file.write(',\n'.join(['\t"%s"' % name for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n\t];\n')
js_file.close()
Declare imageNames with var, use .format() instead of %, remove tab before closing bracket
|
#!/usr/bin/python
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('var imageNames = [\n')
js_file.write(',\n'.join(['\t"{}"'.format(name) for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n];\n')
js_file.close()
|
<commit_before>#!/usr/bin/python
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('imageNames = [\n')
js_file.write(',\n'.join(['\t"%s"' % name for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n\t];\n')
js_file.close()
<commit_msg>Declare imageNames with var, use .format() instead of %, remove tab before closing bracket<commit_after>
|
#!/usr/bin/python
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('var imageNames = [\n')
js_file.write(',\n'.join(['\t"{}"'.format(name) for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n];\n')
js_file.close()
|
#!/usr/bin/python
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('imageNames = [\n')
js_file.write(',\n'.join(['\t"%s"' % name for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n\t];\n')
js_file.close()
Declare imageNames with var, use .format() instead of %, remove tab before closing bracket#!/usr/bin/python
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('var imageNames = [\n')
js_file.write(',\n'.join(['\t"{}"'.format(name) for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n];\n')
js_file.close()
|
<commit_before>#!/usr/bin/python
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('imageNames = [\n')
js_file.write(',\n'.join(['\t"%s"' % name for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n\t];\n')
js_file.close()
<commit_msg>Declare imageNames with var, use .format() instead of %, remove tab before closing bracket<commit_after>#!/usr/bin/python
import os
if __name__ == '__main__':
js_file = open('cropList.js', 'w')
js_file.write('var imageNames = [\n')
js_file.write(',\n'.join(['\t"{}"'.format(name) for name in os.listdir('originals')
if name[-4:] in ('.JPG', '.jpg', '.PNG', '.png')]))
js_file.write('\n];\n')
js_file.close()
|
abe30517c0a16cb54977979ab4a90c3fd841f801
|
modules/tools.py
|
modules/tools.py
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
for callback in self.callbacks:
callback()
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def has_expired(self):
return self.expired
|
Add a has_expired() method to the Timer class.
|
Add a has_expired() method to the Timer class.
|
Python
|
mit
|
kxgames/kxg
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
for callback in self.callbacks:
callback()
Add a has_expired() method to the Timer class.
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def has_expired(self):
return self.expired
|
<commit_before>inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
for callback in self.callbacks:
callback()
<commit_msg>Add a has_expired() method to the Timer class.<commit_after>
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def has_expired(self):
return self.expired
|
inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
for callback in self.callbacks:
callback()
Add a has_expired() method to the Timer class.inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def has_expired(self):
return self.expired
|
<commit_before>inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
for callback in self.callbacks:
callback()
<commit_msg>Add a has_expired() method to the Timer class.<commit_after>inf = infinity = float('inf')
class Timer:
def __init__(self, duration, *callbacks):
self.duration = duration
self.callbacks = list(callbacks)
self.elapsed = 0
self.expired = False
self.paused = False
def register(self, callback):
self.callbacks.append(callback)
def unregister(self, callback):
self.callbacks.remove(callback)
def pause(self):
self.paused = True
def unpause(self):
self.pause = False
def update(self, time):
if self.expired:
return
if self.elapsed > self.duration:
return
self.elapsed += time
if self.elapsed > self.duration:
self.expired = True
for callback in self.callbacks:
callback()
def has_expired(self):
return self.expired
|
e5b879a9b56f6a03a9ccec8eb5a2496de3ffe4ac
|
pairwise/pairwise_theano.py
|
pairwise/pairwise_theano.py
|
# Authors: James Bergstra
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_tensor_' + dtype
return rval
def pairwise_theano_blas_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
X_norm_2 = (X ** 2).sum(axis=1)
dists = TT.sqrt(2 * X_norm_2 - TT.dot(X, X.T))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_blas_' + dtype
return rval
benchmarks = (
pairwise_theano_tensor_prepare('float32'),
pairwise_theano_tensor_prepare('float64'),
pairwise_theano_blas_prepare('float32'),
pairwise_theano_blas_prepare('float64'),
)
|
# Authors: James Bergstra
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_broadcast_' + dtype
return rval
def pairwise_theano_blas_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
X_norm_2 = (X ** 2).sum(axis=1)
dists = TT.sqrt(2 * X_norm_2 - TT.dot(X, X.T))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_blas_' + dtype
return rval
benchmarks = (
pairwise_theano_tensor_prepare('float32'),
pairwise_theano_tensor_prepare('float64'),
pairwise_theano_blas_prepare('float32'),
pairwise_theano_blas_prepare('float64'),
)
|
Set the Theano case name closer to the numpy test name.
|
Set the Theano case name closer to the numpy test name.
|
Python
|
mit
|
numfocus/python-benchmarks,numfocus/python-benchmarks
|
# Authors: James Bergstra
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_tensor_' + dtype
return rval
def pairwise_theano_blas_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
X_norm_2 = (X ** 2).sum(axis=1)
dists = TT.sqrt(2 * X_norm_2 - TT.dot(X, X.T))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_blas_' + dtype
return rval
benchmarks = (
pairwise_theano_tensor_prepare('float32'),
pairwise_theano_tensor_prepare('float64'),
pairwise_theano_blas_prepare('float32'),
pairwise_theano_blas_prepare('float64'),
)
Set the Theano case name closer to the numpy test name.
|
# Authors: James Bergstra
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_broadcast_' + dtype
return rval
def pairwise_theano_blas_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
X_norm_2 = (X ** 2).sum(axis=1)
dists = TT.sqrt(2 * X_norm_2 - TT.dot(X, X.T))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_blas_' + dtype
return rval
benchmarks = (
pairwise_theano_tensor_prepare('float32'),
pairwise_theano_tensor_prepare('float64'),
pairwise_theano_blas_prepare('float32'),
pairwise_theano_blas_prepare('float64'),
)
|
<commit_before># Authors: James Bergstra
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_tensor_' + dtype
return rval
def pairwise_theano_blas_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
X_norm_2 = (X ** 2).sum(axis=1)
dists = TT.sqrt(2 * X_norm_2 - TT.dot(X, X.T))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_blas_' + dtype
return rval
benchmarks = (
pairwise_theano_tensor_prepare('float32'),
pairwise_theano_tensor_prepare('float64'),
pairwise_theano_blas_prepare('float32'),
pairwise_theano_blas_prepare('float64'),
)
<commit_msg>Set the Theano case name closer to the numpy test name.<commit_after>
|
# Authors: James Bergstra
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_broadcast_' + dtype
return rval
def pairwise_theano_blas_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
X_norm_2 = (X ** 2).sum(axis=1)
dists = TT.sqrt(2 * X_norm_2 - TT.dot(X, X.T))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_blas_' + dtype
return rval
benchmarks = (
pairwise_theano_tensor_prepare('float32'),
pairwise_theano_tensor_prepare('float64'),
pairwise_theano_blas_prepare('float32'),
pairwise_theano_blas_prepare('float64'),
)
|
# Authors: James Bergstra
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_tensor_' + dtype
return rval
def pairwise_theano_blas_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
X_norm_2 = (X ** 2).sum(axis=1)
dists = TT.sqrt(2 * X_norm_2 - TT.dot(X, X.T))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_blas_' + dtype
return rval
benchmarks = (
pairwise_theano_tensor_prepare('float32'),
pairwise_theano_tensor_prepare('float64'),
pairwise_theano_blas_prepare('float32'),
pairwise_theano_blas_prepare('float64'),
)
Set the Theano case name closer to the numpy test name.# Authors: James Bergstra
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_broadcast_' + dtype
return rval
def pairwise_theano_blas_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
X_norm_2 = (X ** 2).sum(axis=1)
dists = TT.sqrt(2 * X_norm_2 - TT.dot(X, X.T))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_blas_' + dtype
return rval
benchmarks = (
pairwise_theano_tensor_prepare('float32'),
pairwise_theano_tensor_prepare('float64'),
pairwise_theano_blas_prepare('float32'),
pairwise_theano_blas_prepare('float64'),
)
|
<commit_before># Authors: James Bergstra
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_tensor_' + dtype
return rval
def pairwise_theano_blas_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
X_norm_2 = (X ** 2).sum(axis=1)
dists = TT.sqrt(2 * X_norm_2 - TT.dot(X, X.T))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_blas_' + dtype
return rval
benchmarks = (
pairwise_theano_tensor_prepare('float32'),
pairwise_theano_tensor_prepare('float64'),
pairwise_theano_blas_prepare('float32'),
pairwise_theano_blas_prepare('float64'),
)
<commit_msg>Set the Theano case name closer to the numpy test name.<commit_after># Authors: James Bergstra
# License: MIT
import theano
import theano.tensor as TT
def pairwise_theano_tensor_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
dists = TT.sqrt(
TT.sum(
TT.sqr(X[:, None, :] - X),
axis=2))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_broadcast_' + dtype
return rval
def pairwise_theano_blas_prepare(dtype):
X = TT.matrix(dtype=str(dtype))
X_norm_2 = (X ** 2).sum(axis=1)
dists = TT.sqrt(2 * X_norm_2 - TT.dot(X, X.T))
rval = theano.function([X],
theano.Out(dists, borrow=True),
allow_input_downcast=True)
rval.__name__ = 'pairwise_theano_blas_' + dtype
return rval
benchmarks = (
pairwise_theano_tensor_prepare('float32'),
pairwise_theano_tensor_prepare('float64'),
pairwise_theano_blas_prepare('float32'),
pairwise_theano_blas_prepare('float64'),
)
|
661299275942813a0c45aa90db64c9603d287839
|
lib_common/src/d1_common/iter/string.py
|
lib_common/src/d1_common/iter/string.py
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
def __len__(self):
return len(self._string)
@property
def size(self):
return len(self._string)
|
Improve StringIterator to allow for more general usage
|
Improve StringIterator to allow for more general usage
|
Python
|
apache-2.0
|
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
Improve StringIterator to allow for more general usage
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
def __len__(self):
return len(self._string)
@property
def size(self):
return len(self._string)
|
<commit_before># -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
<commit_msg>Improve StringIterator to allow for more general usage<commit_after>
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
    """Iterate over a string, yielding it in fixed-size chunks.

    Also exposes ``len()`` and ``size`` so callers (e.g. HTTP clients
    that must send a Content-Length) can learn the total size without
    consuming the iterator.
    """

    def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
        """
        Args:
          string: The string whose contents will be yielded in chunks.
          chunk_size: Maximum number of characters per chunk. The final
            chunk may be shorter.
        """
        self._string = string
        self._chunk_size = chunk_size

    def __iter__(self):
        # Slice the in-memory string directly instead of wrapping it in a
        # StringIO file object: the chunking is equivalent, there is no
        # throwaway file object per pass, and the code no longer depends
        # on the Python-2-only StringIO module.
        for start in range(0, len(self._string), self._chunk_size):
            yield self._string[start:start + self._chunk_size]

    def __len__(self):
        return len(self._string)

    @property
    def size(self):
        # Total number of characters that the iterator will yield overall.
        return len(self._string)
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
Improve StringIterator to allow for more general usage# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
def __len__(self):
return len(self._string)
@property
def size(self):
return len(self._string)
|
<commit_before># -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
<commit_msg>Improve StringIterator to allow for more general usage<commit_after># -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Iterate over string"""
from __future__ import absolute_import
import StringIO
import d1_common.const
class StringIterator(object):
"""Generator that returns the bytes of a string in chunks"""
def __init__(self, string, chunk_size=d1_common.const.DEFAULT_CHUNK_SIZE):
self._string = string
self._chunk_size = chunk_size
def __iter__(self):
f = StringIO.StringIO(self._string)
while True:
chunk_str = f.read(self._chunk_size)
if not chunk_str:
break
yield chunk_str
def __len__(self):
return len(self._string)
@property
def size(self):
return len(self._string)
|
0b6b90a91390551fffebacea55e9cccb4fa3d277
|
capmetrics_etl/cli.py
|
capmetrics_etl/cli.py
|
import click
from . import etl
@click.command()
@click.option('--test', default=False)
def run(test):
if not test:
etl.run_excel_etl()
else:
click.echo('Capmetrics test.')
# Call run function when module deployed as script. This is approach is common
# within the python community
if __name__ == '__main__':
run()
|
import click
import configparser
import json
from . import etl
def parse_capmetrics_configuration(config_parser):
    """Build the capmetrics configuration dict from a parsed INI file.

    Reads the ``[capmetrics]`` section of *config_parser*: ``engine`` is
    copied through, ``worksheet_names`` is decoded from its JSON
    representation, and ``timezone`` falls back to ``America/Chicago``
    when the option is absent.
    """
    section = config_parser['capmetrics']
    return {
        'engine': section['engine'],
        'worksheet_names': json.loads(section['worksheet_names']),
        'timezone': section['timezone'] if 'timezone' in section
                    else 'America/Chicago',
    }
@click.command()
@click.argument('config')
@click.option('--test', default=False)
def run(config, test):
    """Entry point for the capmetrics ETL command line interface.

    CONFIG is the path to an INI file with a ``[capmetrics]`` section.
    When ``--test`` is truthy, only echoes a message and performs no work.
    """
    if not test:
        config_parser = configparser.ConfigParser()
        # make parsing of config file names case-sensitive
        # (the default optionxform lower-cases option keys)
        config_parser.optionxform = str
        config_parser.read(config)
        capmetrics_configuration = parse_capmetrics_configuration(config_parser)
        # Hand the parsed settings to the Excel extract-transform-load pipeline.
        etl.run_excel_etl(capmetrics_configuration)
    else:
        click.echo('Capmetrics CLI test.')
# Call run function when module deployed as script. This is approach is common
# within the python community
if __name__ == '__main__':
run()
|
Add configuration parsing to CLI.
|
Add configuration parsing to CLI.
|
Python
|
mit
|
jga/capmetrics-etl,jga/capmetrics-etl
|
import click
from . import etl
@click.command()
@click.option('--test', default=False)
def run(test):
if not test:
etl.run_excel_etl()
else:
click.echo('Capmetrics test.')
# Call run function when module deployed as script. This is approach is common
# within the python community
if __name__ == '__main__':
run()
Add configuration parsing to CLI.
|
import click
import configparser
import json
from . import etl
def parse_capmetrics_configuration(config_parser):
worksheet_names = json.loads(config_parser['capmetrics']['worksheet_names'])
capmetrics_configuration = {
'timezone': 'America/Chicago',
'engine': config_parser['capmetrics']['engine'],
'worksheet_names': worksheet_names
}
if 'timezone' in config_parser['capmetrics']:
capmetrics_configuration['timezone'] = config_parser['capmetrics']['timezone']
return capmetrics_configuration
@click.command()
@click.argument('config')
@click.option('--test', default=False)
def run(config, test):
if not test:
config_parser = configparser.ConfigParser()
# make parsing of config file names case-sensitive
config_parser.optionxform = str
config_parser.read(config)
capmetrics_configuration = parse_capmetrics_configuration(config_parser)
etl.run_excel_etl(capmetrics_configuration)
else:
click.echo('Capmetrics CLI test.')
# Call run function when module deployed as script. This is approach is common
# within the python community
if __name__ == '__main__':
run()
|
<commit_before>import click
from . import etl
@click.command()
@click.option('--test', default=False)
def run(test):
if not test:
etl.run_excel_etl()
else:
click.echo('Capmetrics test.')
# Call run function when module deployed as script. This is approach is common
# within the python community
if __name__ == '__main__':
run()
<commit_msg>Add configuration parsing to CLI.<commit_after>
|
import click
import configparser
import json
from . import etl
def parse_capmetrics_configuration(config_parser):
worksheet_names = json.loads(config_parser['capmetrics']['worksheet_names'])
capmetrics_configuration = {
'timezone': 'America/Chicago',
'engine': config_parser['capmetrics']['engine'],
'worksheet_names': worksheet_names
}
if 'timezone' in config_parser['capmetrics']:
capmetrics_configuration['timezone'] = config_parser['capmetrics']['timezone']
return capmetrics_configuration
@click.command()
@click.argument('config')
@click.option('--test', default=False)
def run(config, test):
if not test:
config_parser = configparser.ConfigParser()
# make parsing of config file names case-sensitive
config_parser.optionxform = str
config_parser.read(config)
capmetrics_configuration = parse_capmetrics_configuration(config_parser)
etl.run_excel_etl(capmetrics_configuration)
else:
click.echo('Capmetrics CLI test.')
# Call run function when module deployed as script. This is approach is common
# within the python community
if __name__ == '__main__':
run()
|
import click
from . import etl
@click.command()
@click.option('--test', default=False)
def run(test):
if not test:
etl.run_excel_etl()
else:
click.echo('Capmetrics test.')
# Call run function when module deployed as script. This is approach is common
# within the python community
if __name__ == '__main__':
run()
Add configuration parsing to CLI.import click
import configparser
import json
from . import etl
def parse_capmetrics_configuration(config_parser):
worksheet_names = json.loads(config_parser['capmetrics']['worksheet_names'])
capmetrics_configuration = {
'timezone': 'America/Chicago',
'engine': config_parser['capmetrics']['engine'],
'worksheet_names': worksheet_names
}
if 'timezone' in config_parser['capmetrics']:
capmetrics_configuration['timezone'] = config_parser['capmetrics']['timezone']
return capmetrics_configuration
@click.command()
@click.argument('config')
@click.option('--test', default=False)
def run(config, test):
if not test:
config_parser = configparser.ConfigParser()
# make parsing of config file names case-sensitive
config_parser.optionxform = str
config_parser.read(config)
capmetrics_configuration = parse_capmetrics_configuration(config_parser)
etl.run_excel_etl(capmetrics_configuration)
else:
click.echo('Capmetrics CLI test.')
# Call run function when module deployed as script. This is approach is common
# within the python community
if __name__ == '__main__':
run()
|
<commit_before>import click
from . import etl
@click.command()
@click.option('--test', default=False)
def run(test):
if not test:
etl.run_excel_etl()
else:
click.echo('Capmetrics test.')
# Call run function when module deployed as script. This is approach is common
# within the python community
if __name__ == '__main__':
run()
<commit_msg>Add configuration parsing to CLI.<commit_after>import click
import configparser
import json
from . import etl
def parse_capmetrics_configuration(config_parser):
worksheet_names = json.loads(config_parser['capmetrics']['worksheet_names'])
capmetrics_configuration = {
'timezone': 'America/Chicago',
'engine': config_parser['capmetrics']['engine'],
'worksheet_names': worksheet_names
}
if 'timezone' in config_parser['capmetrics']:
capmetrics_configuration['timezone'] = config_parser['capmetrics']['timezone']
return capmetrics_configuration
@click.command()
@click.argument('config')
@click.option('--test', default=False)
def run(config, test):
if not test:
config_parser = configparser.ConfigParser()
# make parsing of config file names case-sensitive
config_parser.optionxform = str
config_parser.read(config)
capmetrics_configuration = parse_capmetrics_configuration(config_parser)
etl.run_excel_etl(capmetrics_configuration)
else:
click.echo('Capmetrics CLI test.')
# Call run function when module deployed as script. This is approach is common
# within the python community
if __name__ == '__main__':
run()
|
3d1774aeb21b38e7cbb677228aed25e36374d560
|
basics/candidate.py
|
basics/candidate.py
|
import numpy as np
class Candidate(object):
    """
    A candidate bubble portion extracted from a single 2D plane.

    Properties are populated lazily: call ``get_props()`` before reading
    ``size``/``pa``/``major``/``minor``; otherwise the backing attributes
    do not exist yet and an AttributeError is raised.
    """
    def __init__(self, mask, img_coords):
        super(Candidate, self).__init__()
        # Mask of the candidate region; anything with a .sum() method
        # (presumably a boolean numpy array -- confirm with callers).
        self.mask = mask
        # Location of the mask within the full image.
        self.img_coords = img_coords
        # Hierarchy links; not assigned anywhere in this class, so they
        # are presumably set externally when candidates are nested.
        self._parent = None
        self._child = None
    def get_props(self):
        '''
        Compute and cache properties of the bubble candidate.

        Currently only the pixel count (``size``) is computed; position
        angle and axis lengths are placeholders left as None.
        '''
        self._size = self.mask.sum()
        self._pa = None
        self._major = None
        self._minor = None
    @property
    def size(self):
        # Pixel count of the mask (set by get_props).
        return self._size
    @property
    def pa(self):
        # Position angle; not yet computed (always None after get_props).
        return self._pa
    @property
    def major(self):
        # Major axis length; not yet computed (always None after get_props).
        return self._major
    @property
    def minor(self):
        # Minor axis length; not yet computed (always None after get_props).
        return self._minor
    def profiles_lines(self, array, **kwargs):
        '''
        Calculate radial profile lines of the 2D bubbles.

        NOTE(review): this passes ``self.params``, which Candidate never
        defines, so calling it raises AttributeError. Confirm the intended
        source of the ellipse parameters.
        '''
        from basics.profile import azimuthal_profiles
        return azimuthal_profiles(array, self.params, **kwargs)
class CandidateInteraction(object):
    """
    Pairs two candidates whose hierarchical relationship is of interest,
    holding the properties shared between them.
    """

    def __init__(self, candidate1, candidate2):
        super(CandidateInteraction, self).__init__()
        self.candidate1, self.candidate2 = candidate1, candidate2
|
import numpy as np
class Bubble2D(object):
    """
    A candidate bubble identified in a single 2D plane.

    ``props`` is an indexable sequence laid out as
    ``(channel, y, x, major, minor, pa)``.
    """

    def __init__(self, props):
        super(Bubble2D, self).__init__()
        # props is ordered: channel, y, x, major, minor, pa.
        (self._channel, self._y, self._x,
         self._major, self._minor, self._pa) = (
            props[0], props[1], props[2], props[3], props[4], props[5])

    @property
    def params(self):
        """The bubble parameters in their canonical order."""
        return [
            self._channel,
            self._y,
            self._x,
            self._major,
            self._minor,
            self._pa,
        ]

    @property
    def area(self):
        """Area of the bubble ellipse."""
        return np.pi * self.major * self.minor

    @property
    def pa(self):
        """Position angle of the ellipse."""
        return self._pa

    @property
    def major(self):
        """Major axis length."""
        return self._major

    @property
    def minor(self):
        """Minor axis length."""
        return self._minor

    def profiles_lines(self, array, **kwargs):
        """
        Calculate radial profile lines of the 2D bubbles.
        """
        from basics.profile import azimuthal_profiles
        return azimuthal_profiles(array, self.params, **kwargs)
|
Update class for 2D bubbles
|
Update class for 2D bubbles
|
Python
|
mit
|
e-koch/BaSiCs
|
import numpy as np
class Candidate(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, mask, img_coords):
super(Candidate, self).__init__()
self.mask = mask
self.img_coords = img_coords
self._parent = None
self._child = None
def get_props(self):
'''
Properties of the bubble candidate.
'''
self._size = self.mask.sum()
self._pa = None
self._major = None
self._minor = None
@property
def size(self):
return self._size
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
class CandidateInteraction(object):
"""
Common properties between candidates based on their hierarchal structure
"""
def __init__(self, candidate1, candidate2):
super(CandidateInteraction, self).__init__()
self.candidate1 = candidate1
self.candidate2 = candidate2
Update class for 2D bubbles
|
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._channel = props[0]
self._y = props[1]
self._x = props[2]
self._major = props[3]
self._minor = props[4]
self._pa = props[5]
@property
def params(self):
return [self._channel, self._y, self._x, self._major,
self._minor, self._pa]
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
|
<commit_before>
import numpy as np
class Candidate(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, mask, img_coords):
super(Candidate, self).__init__()
self.mask = mask
self.img_coords = img_coords
self._parent = None
self._child = None
def get_props(self):
'''
Properties of the bubble candidate.
'''
self._size = self.mask.sum()
self._pa = None
self._major = None
self._minor = None
@property
def size(self):
return self._size
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
class CandidateInteraction(object):
"""
Common properties between candidates based on their hierarchal structure
"""
def __init__(self, candidate1, candidate2):
super(CandidateInteraction, self).__init__()
self.candidate1 = candidate1
self.candidate2 = candidate2
<commit_msg>Update class for 2D bubbles<commit_after>
|
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._channel = props[0]
self._y = props[1]
self._x = props[2]
self._major = props[3]
self._minor = props[4]
self._pa = props[5]
@property
def params(self):
return [self._channel, self._y, self._x, self._major,
self._minor, self._pa]
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
|
import numpy as np
class Candidate(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, mask, img_coords):
super(Candidate, self).__init__()
self.mask = mask
self.img_coords = img_coords
self._parent = None
self._child = None
def get_props(self):
'''
Properties of the bubble candidate.
'''
self._size = self.mask.sum()
self._pa = None
self._major = None
self._minor = None
@property
def size(self):
return self._size
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
class CandidateInteraction(object):
"""
Common properties between candidates based on their hierarchal structure
"""
def __init__(self, candidate1, candidate2):
super(CandidateInteraction, self).__init__()
self.candidate1 = candidate1
self.candidate2 = candidate2
Update class for 2D bubbles
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._channel = props[0]
self._y = props[1]
self._x = props[2]
self._major = props[3]
self._minor = props[4]
self._pa = props[5]
@property
def params(self):
return [self._channel, self._y, self._x, self._major,
self._minor, self._pa]
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
|
<commit_before>
import numpy as np
class Candidate(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, mask, img_coords):
super(Candidate, self).__init__()
self.mask = mask
self.img_coords = img_coords
self._parent = None
self._child = None
def get_props(self):
'''
Properties of the bubble candidate.
'''
self._size = self.mask.sum()
self._pa = None
self._major = None
self._minor = None
@property
def size(self):
return self._size
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
class CandidateInteraction(object):
"""
Common properties between candidates based on their hierarchal structure
"""
def __init__(self, candidate1, candidate2):
super(CandidateInteraction, self).__init__()
self.candidate1 = candidate1
self.candidate2 = candidate2
<commit_msg>Update class for 2D bubbles<commit_after>
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._channel = props[0]
self._y = props[1]
self._x = props[2]
self._major = props[3]
self._minor = props[4]
self._pa = props[5]
@property
def params(self):
return [self._channel, self._y, self._x, self._major,
self._minor, self._pa]
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
|
c9d8833d59ae4858cfba69e44c1e8aaa5dd07df9
|
tests/test_create_template.py
|
tests/test_create_template.py
|
# -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pip
import pytest
import shutil
import subprocess
TEMPLATE = os.path.realpath('.')
@pytest.fixture(autouse=True)
def clean_tmp_dir(tmpdir, request):
"""
Remove the project directory that is created by cookiecutter during the
tests.
"""
tmp_cwd = tmpdir.mkdir('cookiecutter_out')
os.chdir(str(tmp_cwd))
def remove_project_dir():
if os.path.isdir('pytest-foobar'):
shutil.rmtree('pytest-foobar')
request.addfinalizer(remove_project_dir)
def test_run_cookiecutter_and_plugin_tests(testdir):
try:
subprocess.check_call(['cookiecutter', '--no-input', TEMPLATE])
except subprocess.CalledProcessError as e:
pytest.fail(e)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
|
# -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pip
import pytest
import shutil
import subprocess
from cookiecutter.main import cookiecutter
TEMPLATE = os.path.realpath('.')
@pytest.fixture(autouse=True)
def clean_tmp_dir(tmpdir, request):
    """
    Run every test from a fresh temporary directory and remove the
    project directory that is created by cookiecutter during the
    tests.
    """
    tmp_cwd = tmpdir.mkdir('cookiecutter_out')
    # The tests generate the plugin into the current working directory.
    os.chdir(str(tmp_cwd))
    def remove_project_dir():
        # 'pytest-foobar' is presumably the slug produced by the
        # template's --no-input defaults -- confirm against the template.
        if os.path.isdir('pytest-foobar'):
            shutil.rmtree('pytest-foobar')
    request.addfinalizer(remove_project_dir)
def test_run_cookiecutter_cli_and_plugin_tests(testdir):
    """Generate the plugin via the cookiecutter CLI, install it, and run
    its test suite."""
    try:
        subprocess.check_call(['cookiecutter', '--no-input', TEMPLATE])
    except subprocess.CalledProcessError as e:
        # NOTE(review): pytest.fail expects a message string; this passes
        # the exception object and relies on implicit str() -- confirm.
        pytest.fail(e)
    project_root = 'pytest-foobar'
    assert os.path.isdir(project_root)
    os.chdir(str(project_root))
    # Install the freshly generated plugin into the current environment.
    pip.main(['install', '.'])
    if testdir.runpytest().ret != 0:
        pytest.fail('Error running the tests of the newly generated plugin')
def test_run_cookiecutter_and_plugin_tests(testdir):
    """Generate the plugin through the cookiecutter Python API, install
    it, and run its test suite."""
    cookiecutter(TEMPLATE, no_input=True)
    project_root = 'pytest-foobar'
    assert os.path.isdir(project_root)
    os.chdir(str(project_root))
    # Install the freshly generated plugin into the current environment.
    pip.main(['install', '.'])
    if testdir.runpytest().ret != 0:
        pytest.fail('Error running the tests of the newly generated plugin')
|
Add a test that uses the cookiecutter python API
|
Add a test that uses the cookiecutter python API
|
Python
|
mit
|
luzfcb/cookiecutter-pytest-plugin,pytest-dev/cookiecutter-pytest-plugin,s0undt3ch/cookiecutter-pytest-plugin
|
# -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pip
import pytest
import shutil
import subprocess
TEMPLATE = os.path.realpath('.')
@pytest.fixture(autouse=True)
def clean_tmp_dir(tmpdir, request):
"""
Remove the project directory that is created by cookiecutter during the
tests.
"""
tmp_cwd = tmpdir.mkdir('cookiecutter_out')
os.chdir(str(tmp_cwd))
def remove_project_dir():
if os.path.isdir('pytest-foobar'):
shutil.rmtree('pytest-foobar')
request.addfinalizer(remove_project_dir)
def test_run_cookiecutter_and_plugin_tests(testdir):
try:
subprocess.check_call(['cookiecutter', '--no-input', TEMPLATE])
except subprocess.CalledProcessError as e:
pytest.fail(e)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
Add a test that uses the cookiecutter python API
|
# -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pip
import pytest
import shutil
import subprocess
from cookiecutter.main import cookiecutter
TEMPLATE = os.path.realpath('.')
@pytest.fixture(autouse=True)
def clean_tmp_dir(tmpdir, request):
"""
Remove the project directory that is created by cookiecutter during the
tests.
"""
tmp_cwd = tmpdir.mkdir('cookiecutter_out')
os.chdir(str(tmp_cwd))
def remove_project_dir():
if os.path.isdir('pytest-foobar'):
shutil.rmtree('pytest-foobar')
request.addfinalizer(remove_project_dir)
def test_run_cookiecutter_cli_and_plugin_tests(testdir):
try:
subprocess.check_call(['cookiecutter', '--no-input', TEMPLATE])
except subprocess.CalledProcessError as e:
pytest.fail(e)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
def test_run_cookiecutter_and_plugin_tests(testdir):
cookiecutter(TEMPLATE, no_input=True)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
|
<commit_before># -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pip
import pytest
import shutil
import subprocess
TEMPLATE = os.path.realpath('.')
@pytest.fixture(autouse=True)
def clean_tmp_dir(tmpdir, request):
"""
Remove the project directory that is created by cookiecutter during the
tests.
"""
tmp_cwd = tmpdir.mkdir('cookiecutter_out')
os.chdir(str(tmp_cwd))
def remove_project_dir():
if os.path.isdir('pytest-foobar'):
shutil.rmtree('pytest-foobar')
request.addfinalizer(remove_project_dir)
def test_run_cookiecutter_and_plugin_tests(testdir):
try:
subprocess.check_call(['cookiecutter', '--no-input', TEMPLATE])
except subprocess.CalledProcessError as e:
pytest.fail(e)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
<commit_msg>Add a test that uses the cookiecutter python API<commit_after>
|
# -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pip
import pytest
import shutil
import subprocess
from cookiecutter.main import cookiecutter
TEMPLATE = os.path.realpath('.')
@pytest.fixture(autouse=True)
def clean_tmp_dir(tmpdir, request):
"""
Remove the project directory that is created by cookiecutter during the
tests.
"""
tmp_cwd = tmpdir.mkdir('cookiecutter_out')
os.chdir(str(tmp_cwd))
def remove_project_dir():
if os.path.isdir('pytest-foobar'):
shutil.rmtree('pytest-foobar')
request.addfinalizer(remove_project_dir)
def test_run_cookiecutter_cli_and_plugin_tests(testdir):
try:
subprocess.check_call(['cookiecutter', '--no-input', TEMPLATE])
except subprocess.CalledProcessError as e:
pytest.fail(e)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
def test_run_cookiecutter_and_plugin_tests(testdir):
cookiecutter(TEMPLATE, no_input=True)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
|
# -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pip
import pytest
import shutil
import subprocess
TEMPLATE = os.path.realpath('.')
@pytest.fixture(autouse=True)
def clean_tmp_dir(tmpdir, request):
"""
Remove the project directory that is created by cookiecutter during the
tests.
"""
tmp_cwd = tmpdir.mkdir('cookiecutter_out')
os.chdir(str(tmp_cwd))
def remove_project_dir():
if os.path.isdir('pytest-foobar'):
shutil.rmtree('pytest-foobar')
request.addfinalizer(remove_project_dir)
def test_run_cookiecutter_and_plugin_tests(testdir):
try:
subprocess.check_call(['cookiecutter', '--no-input', TEMPLATE])
except subprocess.CalledProcessError as e:
pytest.fail(e)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
Add a test that uses the cookiecutter python API# -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pip
import pytest
import shutil
import subprocess
from cookiecutter.main import cookiecutter
TEMPLATE = os.path.realpath('.')
@pytest.fixture(autouse=True)
def clean_tmp_dir(tmpdir, request):
"""
Remove the project directory that is created by cookiecutter during the
tests.
"""
tmp_cwd = tmpdir.mkdir('cookiecutter_out')
os.chdir(str(tmp_cwd))
def remove_project_dir():
if os.path.isdir('pytest-foobar'):
shutil.rmtree('pytest-foobar')
request.addfinalizer(remove_project_dir)
def test_run_cookiecutter_cli_and_plugin_tests(testdir):
try:
subprocess.check_call(['cookiecutter', '--no-input', TEMPLATE])
except subprocess.CalledProcessError as e:
pytest.fail(e)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
def test_run_cookiecutter_and_plugin_tests(testdir):
    """Generate the plugin via the cookiecutter Python API and run its tests."""
    cookiecutter(TEMPLATE, no_input=True)
    generated = 'pytest-foobar'
    assert os.path.isdir(generated)
    os.chdir(str(generated))
    pip.main(['install', '.'])
    result = testdir.runpytest()
    if result.ret != 0:
        pytest.fail('Error running the tests of the newly generated plugin')
|
<commit_before># -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pip
import pytest
import shutil
import subprocess
TEMPLATE = os.path.realpath('.')
@pytest.fixture(autouse=True)
def clean_tmp_dir(tmpdir, request):
"""
Remove the project directory that is created by cookiecutter during the
tests.
"""
tmp_cwd = tmpdir.mkdir('cookiecutter_out')
os.chdir(str(tmp_cwd))
def remove_project_dir():
if os.path.isdir('pytest-foobar'):
shutil.rmtree('pytest-foobar')
request.addfinalizer(remove_project_dir)
def test_run_cookiecutter_and_plugin_tests(testdir):
try:
subprocess.check_call(['cookiecutter', '--no-input', TEMPLATE])
except subprocess.CalledProcessError as e:
pytest.fail(e)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
<commit_msg>Add a test that uses the cookiecutter python API<commit_after># -*- coding: utf-8 -*-
"""
test_create_template
--------------------
"""
import os
import pip
import pytest
import shutil
import subprocess
from cookiecutter.main import cookiecutter
TEMPLATE = os.path.realpath('.')
@pytest.fixture(autouse=True)
def clean_tmp_dir(tmpdir, request):
"""
Remove the project directory that is created by cookiecutter during the
tests.
"""
tmp_cwd = tmpdir.mkdir('cookiecutter_out')
os.chdir(str(tmp_cwd))
def remove_project_dir():
if os.path.isdir('pytest-foobar'):
shutil.rmtree('pytest-foobar')
request.addfinalizer(remove_project_dir)
def test_run_cookiecutter_cli_and_plugin_tests(testdir):
try:
subprocess.check_call(['cookiecutter', '--no-input', TEMPLATE])
except subprocess.CalledProcessError as e:
pytest.fail(e)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
def test_run_cookiecutter_and_plugin_tests(testdir):
cookiecutter(TEMPLATE, no_input=True)
project_root = 'pytest-foobar'
assert os.path.isdir(project_root)
os.chdir(str(project_root))
pip.main(['install', '.'])
if testdir.runpytest().ret != 0:
pytest.fail('Error running the tests of the newly generated plugin')
|
cf05e02a1efc1bf680904df5f34eb783131b37bb
|
db/sql_server/pyodbc.py
|
db/sql_server/pyodbc.py
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
    """
    django-pyodbc (sql_server.pyodbc) implementation of database operations.
    """

    def create_table(self, table_name, fields):
        # Tweak stuff as needed: replace Python bool defaults on
        # BooleanFields with the equivalent 1/0 integers before delegating
        # (presumably because the SQL Server backend cannot render True/False
        # literals -- TODO confirm against the generic SQL generation).
        # Note `==` (not `is`) is used, so 1.0/1-style defaults also match.
        for name,f in fields:
            if isinstance(f, BooleanField):
                if f.default == True:
                    f.default = 1
                if f.default == False:
                    f.default = 0
        # Run the generic implementation with the adjusted field defaults.
        generic.DatabaseOperations.create_table(self, table_name, fields)
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
    """
    django-pyodbc (sql_server.pyodbc) implementation of database operations.
    """

    add_column_string = 'ALTER TABLE %s ADD %s;'

    def create_table(self, table_name, fields):
        """Create *table_name*, coercing boolean defaults to 0/1 first."""
        # Normalise BooleanField defaults before handing off to the generic
        # implementation (equality, not identity, so 1/1.0 defaults match too).
        for _name, field in fields:
            if isinstance(field, BooleanField):
                if field.default == False:
                    field.default = 0
                elif field.default == True:
                    field.default = 1
        generic.DatabaseOperations.create_table(self, table_name, fields)
|
Add column support for sql server
|
Add column support for sql server
--HG--
extra : convert_revision : svn%3A69d324d9-c39d-4fdc-8679-7745eae9e2c8/trunk%40111
|
Python
|
apache-2.0
|
philipn/django-south,RaD/django-south,RaD/django-south,philipn/django-south,RaD/django-south,nimnull/django-south,nimnull/django-south
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
Add column support for sql server
--HG--
extra : convert_revision : svn%3A69d324d9-c39d-4fdc-8679-7745eae9e2c8/trunk%40111
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
<commit_before>from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
<commit_msg>Add column support for sql server
--HG--
extra : convert_revision : svn%3A69d324d9-c39d-4fdc-8679-7745eae9e2c8/trunk%40111<commit_after>
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
Add column support for sql server
--HG--
extra : convert_revision : svn%3A69d324d9-c39d-4fdc-8679-7745eae9e2c8/trunk%40111from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
<commit_before>from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
<commit_msg>Add column support for sql server
--HG--
extra : convert_revision : svn%3A69d324d9-c39d-4fdc-8679-7745eae9e2c8/trunk%40111<commit_after>from django.db import connection
from django.db.models.fields import *
from south.db import generic
class DatabaseOperations(generic.DatabaseOperations):
"""
django-pyodbc (sql_server.pyodbc) implementation of database operations.
"""
add_column_string = 'ALTER TABLE %s ADD %s;'
def create_table(self, table_name, fields):
# Tweak stuff as needed
for name,f in fields:
if isinstance(f, BooleanField):
if f.default == True:
f.default = 1
if f.default == False:
f.default = 0
# Run
generic.DatabaseOperations.create_table(self, table_name, fields)
|
0213bbb8f8075b2dc36a33380a66932c9d541f63
|
src/sphobjinv/__init__.py
|
src/sphobjinv/__init__.py
|
r"""``sphobjinv`` *package definition module*.
``sphobjinv`` is a toolkit for manipulation and inspection of
Sphinx |objects.inv| files.
**Author**
Brian Skinn (bskinn@alum.mit.edu)
**File Created**
17 May 2016
**Copyright**
\(c) Brian Skinn 2016-2022
**Source Repository**
https://github.com/bskinn/sphobjinv
**Documentation**
https://sphobjinv.readthedocs.io/en/latest
**License**
The MIT License; see |license_txt|_ for full license terms
**Members**
"""
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr
from sphobjinv.enum import HeaderFields, SourceTypes
from sphobjinv.error import (
SphobjinvError,
VersionError,
)
from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson
from sphobjinv.inventory import Inventory
from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version
from sphobjinv.schema import json_schema
from sphobjinv.version import __version__
from sphobjinv.zlib import compress, decompress
|
r"""``sphobjinv`` *package definition module*.
``sphobjinv`` is a toolkit for manipulation and inspection of
Sphinx |objects.inv| files.
**Author**
Brian Skinn (bskinn@alum.mit.edu)
**File Created**
17 May 2016
**Copyright**
\(c) Brian Skinn 2016-2022
**Source Repository**
https://github.com/bskinn/sphobjinv
**Documentation**
https://sphobjinv.readthedocs.io/en/latest
**License**
The MIT License; see |license_txt|_ for full license terms
**Members**
"""
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr
from sphobjinv.enum import HeaderFields, SourceTypes
from sphobjinv.error import SphobjinvError, VersionError
from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson
from sphobjinv.inventory import Inventory
from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version
from sphobjinv.schema import json_schema
from sphobjinv.version import __version__
from sphobjinv.zlib import compress, decompress
|
Clean up the error imports
|
Clean up the error imports
The new errors that had been added for _intersphinx.py had left
the sphobjinv.error import line split. No need, when it all fits on
one line.
|
Python
|
mit
|
bskinn/sphobjinv
|
r"""``sphobjinv`` *package definition module*.
``sphobjinv`` is a toolkit for manipulation and inspection of
Sphinx |objects.inv| files.
**Author**
Brian Skinn (bskinn@alum.mit.edu)
**File Created**
17 May 2016
**Copyright**
\(c) Brian Skinn 2016-2022
**Source Repository**
https://github.com/bskinn/sphobjinv
**Documentation**
https://sphobjinv.readthedocs.io/en/latest
**License**
The MIT License; see |license_txt|_ for full license terms
**Members**
"""
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr
from sphobjinv.enum import HeaderFields, SourceTypes
from sphobjinv.error import (
SphobjinvError,
VersionError,
)
from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson
from sphobjinv.inventory import Inventory
from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version
from sphobjinv.schema import json_schema
from sphobjinv.version import __version__
from sphobjinv.zlib import compress, decompress
Clean up the error imports
The new errors that had been added for _intersphinx.py had left
the sphobjinv.error import line split. No need, when it all fits on
one line.
|
r"""``sphobjinv`` *package definition module*.
``sphobjinv`` is a toolkit for manipulation and inspection of
Sphinx |objects.inv| files.
**Author**
Brian Skinn (bskinn@alum.mit.edu)
**File Created**
17 May 2016
**Copyright**
\(c) Brian Skinn 2016-2022
**Source Repository**
https://github.com/bskinn/sphobjinv
**Documentation**
https://sphobjinv.readthedocs.io/en/latest
**License**
The MIT License; see |license_txt|_ for full license terms
**Members**
"""
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr
from sphobjinv.enum import HeaderFields, SourceTypes
from sphobjinv.error import SphobjinvError, VersionError
from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson
from sphobjinv.inventory import Inventory
from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version
from sphobjinv.schema import json_schema
from sphobjinv.version import __version__
from sphobjinv.zlib import compress, decompress
|
<commit_before>r"""``sphobjinv`` *package definition module*.
``sphobjinv`` is a toolkit for manipulation and inspection of
Sphinx |objects.inv| files.
**Author**
Brian Skinn (bskinn@alum.mit.edu)
**File Created**
17 May 2016
**Copyright**
\(c) Brian Skinn 2016-2022
**Source Repository**
https://github.com/bskinn/sphobjinv
**Documentation**
https://sphobjinv.readthedocs.io/en/latest
**License**
The MIT License; see |license_txt|_ for full license terms
**Members**
"""
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr
from sphobjinv.enum import HeaderFields, SourceTypes
from sphobjinv.error import (
SphobjinvError,
VersionError,
)
from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson
from sphobjinv.inventory import Inventory
from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version
from sphobjinv.schema import json_schema
from sphobjinv.version import __version__
from sphobjinv.zlib import compress, decompress
<commit_msg>Clean up the error imports
The new errors that had been added for _intersphinx.py had left
the sphobjinv.error import line split. No need, when it all fits on
one line.<commit_after>
|
r"""``sphobjinv`` *package definition module*.
``sphobjinv`` is a toolkit for manipulation and inspection of
Sphinx |objects.inv| files.
**Author**
Brian Skinn (bskinn@alum.mit.edu)
**File Created**
17 May 2016
**Copyright**
\(c) Brian Skinn 2016-2022
**Source Repository**
https://github.com/bskinn/sphobjinv
**Documentation**
https://sphobjinv.readthedocs.io/en/latest
**License**
The MIT License; see |license_txt|_ for full license terms
**Members**
"""
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr
from sphobjinv.enum import HeaderFields, SourceTypes
from sphobjinv.error import SphobjinvError, VersionError
from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson
from sphobjinv.inventory import Inventory
from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version
from sphobjinv.schema import json_schema
from sphobjinv.version import __version__
from sphobjinv.zlib import compress, decompress
|
r"""``sphobjinv`` *package definition module*.
``sphobjinv`` is a toolkit for manipulation and inspection of
Sphinx |objects.inv| files.
**Author**
Brian Skinn (bskinn@alum.mit.edu)
**File Created**
17 May 2016
**Copyright**
\(c) Brian Skinn 2016-2022
**Source Repository**
https://github.com/bskinn/sphobjinv
**Documentation**
https://sphobjinv.readthedocs.io/en/latest
**License**
The MIT License; see |license_txt|_ for full license terms
**Members**
"""
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr
from sphobjinv.enum import HeaderFields, SourceTypes
from sphobjinv.error import (
SphobjinvError,
VersionError,
)
from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson
from sphobjinv.inventory import Inventory
from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version
from sphobjinv.schema import json_schema
from sphobjinv.version import __version__
from sphobjinv.zlib import compress, decompress
Clean up the error imports
The new errors that had been added for _intersphinx.py had left
the sphobjinv.error import line split. No need, when it all fits on
one line.r"""``sphobjinv`` *package definition module*.
``sphobjinv`` is a toolkit for manipulation and inspection of
Sphinx |objects.inv| files.
**Author**
Brian Skinn (bskinn@alum.mit.edu)
**File Created**
17 May 2016
**Copyright**
\(c) Brian Skinn 2016-2022
**Source Repository**
https://github.com/bskinn/sphobjinv
**Documentation**
https://sphobjinv.readthedocs.io/en/latest
**License**
The MIT License; see |license_txt|_ for full license terms
**Members**
"""
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr
from sphobjinv.enum import HeaderFields, SourceTypes
from sphobjinv.error import SphobjinvError, VersionError
from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson
from sphobjinv.inventory import Inventory
from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version
from sphobjinv.schema import json_schema
from sphobjinv.version import __version__
from sphobjinv.zlib import compress, decompress
|
<commit_before>r"""``sphobjinv`` *package definition module*.
``sphobjinv`` is a toolkit for manipulation and inspection of
Sphinx |objects.inv| files.
**Author**
Brian Skinn (bskinn@alum.mit.edu)
**File Created**
17 May 2016
**Copyright**
\(c) Brian Skinn 2016-2022
**Source Repository**
https://github.com/bskinn/sphobjinv
**Documentation**
https://sphobjinv.readthedocs.io/en/latest
**License**
The MIT License; see |license_txt|_ for full license terms
**Members**
"""
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr
from sphobjinv.enum import HeaderFields, SourceTypes
from sphobjinv.error import (
SphobjinvError,
VersionError,
)
from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson
from sphobjinv.inventory import Inventory
from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version
from sphobjinv.schema import json_schema
from sphobjinv.version import __version__
from sphobjinv.zlib import compress, decompress
<commit_msg>Clean up the error imports
The new errors that had been added for _intersphinx.py had left
the sphobjinv.error import line split. No need, when it all fits on
one line.<commit_after>r"""``sphobjinv`` *package definition module*.
``sphobjinv`` is a toolkit for manipulation and inspection of
Sphinx |objects.inv| files.
**Author**
Brian Skinn (bskinn@alum.mit.edu)
**File Created**
17 May 2016
**Copyright**
\(c) Brian Skinn 2016-2022
**Source Repository**
https://github.com/bskinn/sphobjinv
**Documentation**
https://sphobjinv.readthedocs.io/en/latest
**License**
The MIT License; see |license_txt|_ for full license terms
**Members**
"""
from sphobjinv.data import DataFields, DataObjBytes, DataObjStr
from sphobjinv.enum import HeaderFields, SourceTypes
from sphobjinv.error import SphobjinvError, VersionError
from sphobjinv.fileops import readbytes, readjson, urlwalk, writebytes, writejson
from sphobjinv.inventory import Inventory
from sphobjinv.re import p_data, pb_comments, pb_data, pb_project, pb_version
from sphobjinv.schema import json_schema
from sphobjinv.version import __version__
from sphobjinv.zlib import compress, decompress
|
63b3184aeae800795775928a67bf7567b487cba3
|
tools/windows/eclipse_make.py
|
tools/windows/eclipse_make.py
|
#!/usr/bin/env python
#
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import division, print_function
import os.path
import re
import subprocess
import sys
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
    """Convert a Unix-style *path* to a forward-slashed Windows path via ``cygpath``.

    Results (including failed conversions) are memoised in the module-level
    ``paths`` dict so each distinct path is converted at most once.  Returns
    *path* unchanged when conversion fails or the converted path does not
    exist on disk.
    """
    try:
        return paths[path]
    except KeyError:
        pass
    paths[path] = path  # cache as failed, replace with success if it works
    try:
        # Fix: decode explicitly as UTF-8.  A bare .decode() falls back to
        # ASCII on Python 2 and raised UnicodeDecodeError for non-ASCII paths.
        winpath = subprocess.check_output(['cygpath', '-w', path]).decode('utf-8').strip()
    except subprocess.CalledProcessError:
        return path  # something went wrong running cygpath, assume this is not a path!
    if not os.path.exists(winpath):
        return path  # not actually a valid path
    winpath = winpath.replace('\\', '/')  # make consistent with forward-slashes used elsewhere
    paths[path] = winpath
    return winpath
def main():
    """Run ``make``, rewriting MSYS paths in its output to Windows form for Eclipse.

    Extra command-line arguments are forwarded to make; exits with make's
    return code.
    """
    print("Running make in '%s'" % check_path(os.getcwd()))
    # Fix: open the pipe in text mode.  With a binary pipe, readline()
    # yields b'' at EOF on Python 3, which never equals the '' sentinel,
    # so the loop would never terminate (and re.sub would reject bytes).
    make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'],
                            stdout=subprocess.PIPE,
                            universal_newlines=True)
    for line in iter(make.stdout.readline, ''):
        line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
        print(line.rstrip())
    sys.exit(make.wait())


if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
#
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import division, print_function
import os.path
import re
import subprocess
import sys
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
try:
return paths[path]
except KeyError:
pass
paths[path] = path # cache as failed, replace with success if it works
try:
winpath = subprocess.check_output(['cygpath', '-w', path]).decode('utf-8').strip()
except subprocess.CalledProcessError:
return path # something went wrong running cygpath, assume this is not a path!
if not os.path.exists(winpath):
return path # not actually a valid path
winpath = winpath.replace('\\', '/') # make consistent with forward-slashes used elsewhere
paths[path] = winpath
return winpath
def main():
print("Running make in '%s'" % check_path(os.getcwd()))
make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'], stdout=subprocess.PIPE)
for line in iter(make.stdout.readline, ''):
line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
print(line.rstrip())
sys.exit(make.wait())
if __name__ == '__main__':
main()
|
Fix eclipse build: “UnicodeDecodeError: 'ascii' codec can't decode byte”
|
Fix eclipse build: “UnicodeDecodeError: 'ascii' codec can't decode byte”
Closes https://github.com/espressif/esp-idf/pull/6505
|
Python
|
apache-2.0
|
espressif/esp-idf,espressif/esp-idf,espressif/esp-idf,espressif/esp-idf
|
#!/usr/bin/env python
#
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import division, print_function
import os.path
import re
import subprocess
import sys
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
try:
return paths[path]
except KeyError:
pass
paths[path] = path # cache as failed, replace with success if it works
try:
winpath = subprocess.check_output(['cygpath', '-w', path]).decode().strip()
except subprocess.CalledProcessError:
return path # something went wrong running cygpath, assume this is not a path!
if not os.path.exists(winpath):
return path # not actually a valid path
winpath = winpath.replace('\\', '/') # make consistent with forward-slashes used elsewhere
paths[path] = winpath
return winpath
def main():
print("Running make in '%s'" % check_path(os.getcwd()))
make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'], stdout=subprocess.PIPE)
for line in iter(make.stdout.readline, ''):
line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
print(line.rstrip())
sys.exit(make.wait())
if __name__ == '__main__':
main()
Fix eclipse build: “UnicodeDecodeError: 'ascii' codec can't decode byte”
Closes https://github.com/espressif/esp-idf/pull/6505
|
#!/usr/bin/env python
#
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import division, print_function
import os.path
import re
import subprocess
import sys
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
try:
return paths[path]
except KeyError:
pass
paths[path] = path # cache as failed, replace with success if it works
try:
winpath = subprocess.check_output(['cygpath', '-w', path]).decode('utf-8').strip()
except subprocess.CalledProcessError:
return path # something went wrong running cygpath, assume this is not a path!
if not os.path.exists(winpath):
return path # not actually a valid path
winpath = winpath.replace('\\', '/') # make consistent with forward-slashes used elsewhere
paths[path] = winpath
return winpath
def main():
print("Running make in '%s'" % check_path(os.getcwd()))
make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'], stdout=subprocess.PIPE)
for line in iter(make.stdout.readline, ''):
line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
print(line.rstrip())
sys.exit(make.wait())
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
#
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import division, print_function
import os.path
import re
import subprocess
import sys
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
try:
return paths[path]
except KeyError:
pass
paths[path] = path # cache as failed, replace with success if it works
try:
winpath = subprocess.check_output(['cygpath', '-w', path]).decode().strip()
except subprocess.CalledProcessError:
return path # something went wrong running cygpath, assume this is not a path!
if not os.path.exists(winpath):
return path # not actually a valid path
winpath = winpath.replace('\\', '/') # make consistent with forward-slashes used elsewhere
paths[path] = winpath
return winpath
def main():
print("Running make in '%s'" % check_path(os.getcwd()))
make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'], stdout=subprocess.PIPE)
for line in iter(make.stdout.readline, ''):
line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
print(line.rstrip())
sys.exit(make.wait())
if __name__ == '__main__':
main()
<commit_msg>Fix eclipse build: “UnicodeDecodeError: 'ascii' codec can't decode byte”
Closes https://github.com/espressif/esp-idf/pull/6505<commit_after>
|
#!/usr/bin/env python
#
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import division, print_function
import os.path
import re
import subprocess
import sys
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
try:
return paths[path]
except KeyError:
pass
paths[path] = path # cache as failed, replace with success if it works
try:
winpath = subprocess.check_output(['cygpath', '-w', path]).decode('utf-8').strip()
except subprocess.CalledProcessError:
return path # something went wrong running cygpath, assume this is not a path!
if not os.path.exists(winpath):
return path # not actually a valid path
winpath = winpath.replace('\\', '/') # make consistent with forward-slashes used elsewhere
paths[path] = winpath
return winpath
def main():
print("Running make in '%s'" % check_path(os.getcwd()))
make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'], stdout=subprocess.PIPE)
for line in iter(make.stdout.readline, ''):
line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
print(line.rstrip())
sys.exit(make.wait())
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
#
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import division, print_function
import os.path
import re
import subprocess
import sys
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
try:
return paths[path]
except KeyError:
pass
paths[path] = path # cache as failed, replace with success if it works
try:
winpath = subprocess.check_output(['cygpath', '-w', path]).decode().strip()
except subprocess.CalledProcessError:
return path # something went wrong running cygpath, assume this is not a path!
if not os.path.exists(winpath):
return path # not actually a valid path
winpath = winpath.replace('\\', '/') # make consistent with forward-slashes used elsewhere
paths[path] = winpath
return winpath
def main():
print("Running make in '%s'" % check_path(os.getcwd()))
make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'], stdout=subprocess.PIPE)
for line in iter(make.stdout.readline, ''):
line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
print(line.rstrip())
sys.exit(make.wait())
if __name__ == '__main__':
main()
Fix eclipse build: “UnicodeDecodeError: 'ascii' codec can't decode byte”
Closes https://github.com/espressif/esp-idf/pull/6505#!/usr/bin/env python
#
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import division, print_function
import os.path
import re
import subprocess
import sys
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
try:
return paths[path]
except KeyError:
pass
paths[path] = path # cache as failed, replace with success if it works
try:
winpath = subprocess.check_output(['cygpath', '-w', path]).decode('utf-8').strip()
except subprocess.CalledProcessError:
return path # something went wrong running cygpath, assume this is not a path!
if not os.path.exists(winpath):
return path # not actually a valid path
winpath = winpath.replace('\\', '/') # make consistent with forward-slashes used elsewhere
paths[path] = winpath
return winpath
def main():
print("Running make in '%s'" % check_path(os.getcwd()))
make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'], stdout=subprocess.PIPE)
for line in iter(make.stdout.readline, ''):
line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
print(line.rstrip())
sys.exit(make.wait())
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
#
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import division, print_function
import os.path
import re
import subprocess
import sys
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
try:
return paths[path]
except KeyError:
pass
paths[path] = path # cache as failed, replace with success if it works
try:
winpath = subprocess.check_output(['cygpath', '-w', path]).decode().strip()
except subprocess.CalledProcessError:
return path # something went wrong running cygpath, assume this is not a path!
if not os.path.exists(winpath):
return path # not actually a valid path
winpath = winpath.replace('\\', '/') # make consistent with forward-slashes used elsewhere
paths[path] = winpath
return winpath
def main():
print("Running make in '%s'" % check_path(os.getcwd()))
make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'], stdout=subprocess.PIPE)
for line in iter(make.stdout.readline, ''):
line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
print(line.rstrip())
sys.exit(make.wait())
if __name__ == '__main__':
main()
<commit_msg>Fix eclipse build: “UnicodeDecodeError: 'ascii' codec can't decode byte”
Closes https://github.com/espressif/esp-idf/pull/6505<commit_after>#!/usr/bin/env python
#
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import division, print_function
import os.path
import re
import subprocess
import sys
UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')
paths = {}
def check_path(path):
try:
return paths[path]
except KeyError:
pass
paths[path] = path # cache as failed, replace with success if it works
try:
winpath = subprocess.check_output(['cygpath', '-w', path]).decode('utf-8').strip()
except subprocess.CalledProcessError:
return path # something went wrong running cygpath, assume this is not a path!
if not os.path.exists(winpath):
return path # not actually a valid path
winpath = winpath.replace('\\', '/') # make consistent with forward-slashes used elsewhere
paths[path] = winpath
return winpath
def main():
print("Running make in '%s'" % check_path(os.getcwd()))
make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'], stdout=subprocess.PIPE)
for line in iter(make.stdout.readline, ''):
line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
print(line.rstrip())
sys.exit(make.wait())
if __name__ == '__main__':
main()
|
a3e5f553fdf8dffa894d7ba3b89fefd0019653fe
|
oscar/apps/customer/alerts/receivers.py
|
oscar/apps/customer/alerts/receivers.py
|
from django.conf import settings
from django.db.models import get_model
from django.db.models.signals import post_save
from django.contrib.auth.models import User
def send_product_alerts(sender, instance, created, **kwargs):
from oscar.apps.customer.alerts import utils
utils.send_product_alerts(instance.product)
def migrate_alerts_to_user(sender, instance, created, **kwargs):
"""
Transfer any active alerts linked to a user's email address to the newly
registered user.
"""
if created:
ProductAlert = get_model('customer', 'ProductAlert')
alerts = ProductAlert.objects.filter(email=instance.email, status=ProductAlert.ACTIVE)
alerts.update(user=instance, key=None, email=None)
post_save.connect(migrate_alerts_to_user, sender=User)
if settings.OSCAR_EAGER_ALERTS:
StockRecord = get_model('partner', 'StockRecord')
post_save.connect(send_product_alerts, sender=StockRecord)
|
from django.conf import settings
from django.db.models import get_model
from django.db.models.signals import post_save
from django.db import connection
from django.contrib.auth.models import User
def send_product_alerts(sender, instance, created, **kwargs):
from oscar.apps.customer.alerts import utils
utils.send_product_alerts(instance.product)
def migrate_alerts_to_user(sender, instance, created, **kwargs):
"""
Transfer any active alerts linked to a user's email address to the newly
registered user.
"""
if not created:
return
ProductAlert = get_model('customer', 'ProductAlert')
# This signal will be raised when creating a superuser as part of syncdb,
# at which point only a subset of tables will be created. Thus, we test if
# the alert table exists before trying to exercise the ORM.
table = ProductAlert._meta.db_table
if table in connection.introspection.table_names():
alerts = ProductAlert.objects.filter(
email=instance.email, status=ProductAlert.ACTIVE)
alerts.update(user=instance, key=None, email=None)
post_save.connect(migrate_alerts_to_user, sender=User)
if settings.OSCAR_EAGER_ALERTS:
StockRecord = get_model('partner', 'StockRecord')
post_save.connect(send_product_alerts, sender=StockRecord)
|
Adjust post-user-create receiver to check database state
|
Adjust post-user-create receiver to check database state
Need to avoid situation where this signal gets raised by syncdb and the
database isn't in the correct state.
Fixes #475
|
Python
|
bsd-3-clause
|
rocopartners/django-oscar,marcoantoniooliveira/labweb,Idematica/django-oscar,rocopartners/django-oscar,adamend/django-oscar,makielab/django-oscar,michaelkuty/django-oscar,django-oscar/django-oscar,amirrpp/django-oscar,adamend/django-oscar,jinnykoo/wuyisj.com,jinnykoo/christmas,binarydud/django-oscar,jinnykoo/wuyisj.com,okfish/django-oscar,pdonadeo/django-oscar,makielab/django-oscar,thechampanurag/django-oscar,lijoantony/django-oscar,django-oscar/django-oscar,mexeniz/django-oscar,sasha0/django-oscar,dongguangming/django-oscar,pdonadeo/django-oscar,marcoantoniooliveira/labweb,machtfit/django-oscar,eddiep1101/django-oscar,bschuon/django-oscar,jmt4/django-oscar,sonofatailor/django-oscar,thechampanurag/django-oscar,WadeYuChen/django-oscar,saadatqadri/django-oscar,thechampanurag/django-oscar,WillisXChen/django-oscar,kapt/django-oscar,bschuon/django-oscar,bnprk/django-oscar,WadeYuChen/django-oscar,marcoantoniooliveira/labweb,QLGu/django-oscar,amirrpp/django-oscar,pasqualguerrero/django-oscar,michaelkuty/django-oscar,QLGu/django-oscar,taedori81/django-oscar,Idematica/django-oscar,saadatqadri/django-oscar,jinnykoo/wuyisj,mexeniz/django-oscar,saadatqadri/django-oscar,ka7eh/django-oscar,binarydud/django-oscar,jinnykoo/christmas,bschuon/django-oscar,nickpack/django-oscar,faratro/django-oscar,michaelkuty/django-oscar,josesanch/django-oscar,okfish/django-oscar,manevant/django-oscar,jinnykoo/christmas,okfish/django-oscar,jmt4/django-oscar,DrOctogon/unwash_ecom,QLGu/django-oscar,taedori81/django-oscar,spartonia/django-oscar,rocopartners/django-oscar,saadatqadri/django-oscar,eddiep1101/django-oscar,josesanch/django-oscar,manevant/django-oscar,jmt4/django-oscar,dongguangming/django-oscar,sasha0/django-oscar,mexeniz/django-oscar,jinnykoo/wuyisj,MatthewWilkes/django-oscar,WillisXChen/django-oscar,ahmetdaglarbas/e-commerce,elliotthill/django-oscar,okfish/django-oscar,spartonia/django-oscar,manevant/django-oscar,vovanbo/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,thecham
panurag/django-oscar,ka7eh/django-oscar,manevant/django-oscar,vovanbo/django-oscar,lijoantony/django-oscar,spartonia/django-oscar,bnprk/django-oscar,kapari/django-oscar,jlmadurga/django-oscar,Jannes123/django-oscar,makielab/django-oscar,adamend/django-oscar,pasqualguerrero/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj,WillisXChen/django-oscar,vovanbo/django-oscar,ahmetdaglarbas/e-commerce,john-parton/django-oscar,kapari/django-oscar,anentropic/django-oscar,taedori81/django-oscar,makielab/django-oscar,solarissmoke/django-oscar,DrOctogon/unwash_ecom,dongguangming/django-oscar,DrOctogon/unwash_ecom,WillisXChen/django-oscar,kapari/django-oscar,faratro/django-oscar,vovanbo/django-oscar,django-oscar/django-oscar,nfletton/django-oscar,WillisXChen/django-oscar,sonofatailor/django-oscar,rocopartners/django-oscar,ka7eh/django-oscar,amirrpp/django-oscar,pdonadeo/django-oscar,jlmadurga/django-oscar,jmt4/django-oscar,pasqualguerrero/django-oscar,nfletton/django-oscar,eddiep1101/django-oscar,Bogh/django-oscar,jlmadurga/django-oscar,taedori81/django-oscar,pdonadeo/django-oscar,michaelkuty/django-oscar,QLGu/django-oscar,anentropic/django-oscar,elliotthill/django-oscar,solarissmoke/django-oscar,ademuk/django-oscar,dongguangming/django-oscar,kapt/django-oscar,josesanch/django-oscar,nfletton/django-oscar,monikasulik/django-oscar,nickpack/django-oscar,kapt/django-oscar,Jannes123/django-oscar,marcoantoniooliveira/labweb,ahmetdaglarbas/e-commerce,faratro/django-oscar,nickpack/django-oscar,sasha0/django-oscar,monikasulik/django-oscar,solarissmoke/django-oscar,bschuon/django-oscar,MatthewWilkes/django-oscar,binarydud/django-oscar,john-parton/django-oscar,binarydud/django-oscar,ademuk/django-oscar,jlmadurga/django-oscar,machtfit/django-oscar,anentropic/django-oscar,itbabu/django-oscar,amirrpp/django-oscar,Jannes123/django-oscar,kapari/django-oscar,monikasulik/django-oscar,ahmetdaglarbas/e-commerce,itbabu/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,WadeYuChen/django-oscar,a
damend/django-oscar,monikasulik/django-oscar,john-parton/django-oscar,itbabu/django-oscar,lijoantony/django-oscar,sonofatailor/django-oscar,anentropic/django-oscar,jinnykoo/wuyisj.com,Jannes123/django-oscar,ka7eh/django-oscar,elliotthill/django-oscar,mexeniz/django-oscar,django-oscar/django-oscar,bnprk/django-oscar,Bogh/django-oscar,jinnykoo/wuyisj.com,lijoantony/django-oscar,nfletton/django-oscar,ademuk/django-oscar,bnprk/django-oscar,john-parton/django-oscar,pasqualguerrero/django-oscar,Bogh/django-oscar,Bogh/django-oscar,machtfit/django-oscar,eddiep1101/django-oscar,ademuk/django-oscar,Idematica/django-oscar,nickpack/django-oscar,WadeYuChen/django-oscar,faratro/django-oscar,MatthewWilkes/django-oscar,sonofatailor/django-oscar,MatthewWilkes/django-oscar,jinnykoo/wuyisj
|
from django.conf import settings
from django.db.models import get_model
from django.db.models.signals import post_save
from django.contrib.auth.models import User
def send_product_alerts(sender, instance, created, **kwargs):
from oscar.apps.customer.alerts import utils
utils.send_product_alerts(instance.product)
def migrate_alerts_to_user(sender, instance, created, **kwargs):
"""
Transfer any active alerts linked to a user's email address to the newly
registered user.
"""
if created:
ProductAlert = get_model('customer', 'ProductAlert')
alerts = ProductAlert.objects.filter(email=instance.email, status=ProductAlert.ACTIVE)
alerts.update(user=instance, key=None, email=None)
post_save.connect(migrate_alerts_to_user, sender=User)
if settings.OSCAR_EAGER_ALERTS:
StockRecord = get_model('partner', 'StockRecord')
post_save.connect(send_product_alerts, sender=StockRecord)
Adjust post-user-create receiver to check database state
Need to avoid situation where this signal gets raised by syncdb and the
database isn't in the correct state.
Fixes #475
|
from django.conf import settings
from django.db.models import get_model
from django.db.models.signals import post_save
from django.db import connection
from django.contrib.auth.models import User
def send_product_alerts(sender, instance, created, **kwargs):
from oscar.apps.customer.alerts import utils
utils.send_product_alerts(instance.product)
def migrate_alerts_to_user(sender, instance, created, **kwargs):
"""
Transfer any active alerts linked to a user's email address to the newly
registered user.
"""
if not created:
return
ProductAlert = get_model('customer', 'ProductAlert')
# This signal will be raised when creating a superuser as part of syncdb,
# at which point only a subset of tables will be created. Thus, we test if
# the alert table exists before trying to exercise the ORM.
table = ProductAlert._meta.db_table
if table in connection.introspection.table_names():
alerts = ProductAlert.objects.filter(
email=instance.email, status=ProductAlert.ACTIVE)
alerts.update(user=instance, key=None, email=None)
post_save.connect(migrate_alerts_to_user, sender=User)
if settings.OSCAR_EAGER_ALERTS:
StockRecord = get_model('partner', 'StockRecord')
post_save.connect(send_product_alerts, sender=StockRecord)
|
<commit_before>from django.conf import settings
from django.db.models import get_model
from django.db.models.signals import post_save
from django.contrib.auth.models import User
def send_product_alerts(sender, instance, created, **kwargs):
from oscar.apps.customer.alerts import utils
utils.send_product_alerts(instance.product)
def migrate_alerts_to_user(sender, instance, created, **kwargs):
"""
Transfer any active alerts linked to a user's email address to the newly
registered user.
"""
if created:
ProductAlert = get_model('customer', 'ProductAlert')
alerts = ProductAlert.objects.filter(email=instance.email, status=ProductAlert.ACTIVE)
alerts.update(user=instance, key=None, email=None)
post_save.connect(migrate_alerts_to_user, sender=User)
if settings.OSCAR_EAGER_ALERTS:
StockRecord = get_model('partner', 'StockRecord')
post_save.connect(send_product_alerts, sender=StockRecord)
<commit_msg>Adjust post-user-create receiver to check database state
Need to avoid situation where this signal gets raised by syncdb and the
database isn't in the correct state.
Fixes #475<commit_after>
|
from django.conf import settings
from django.db.models import get_model
from django.db.models.signals import post_save
from django.db import connection
from django.contrib.auth.models import User
def send_product_alerts(sender, instance, created, **kwargs):
from oscar.apps.customer.alerts import utils
utils.send_product_alerts(instance.product)
def migrate_alerts_to_user(sender, instance, created, **kwargs):
"""
Transfer any active alerts linked to a user's email address to the newly
registered user.
"""
if not created:
return
ProductAlert = get_model('customer', 'ProductAlert')
# This signal will be raised when creating a superuser as part of syncdb,
# at which point only a subset of tables will be created. Thus, we test if
# the alert table exists before trying to exercise the ORM.
table = ProductAlert._meta.db_table
if table in connection.introspection.table_names():
alerts = ProductAlert.objects.filter(
email=instance.email, status=ProductAlert.ACTIVE)
alerts.update(user=instance, key=None, email=None)
post_save.connect(migrate_alerts_to_user, sender=User)
if settings.OSCAR_EAGER_ALERTS:
StockRecord = get_model('partner', 'StockRecord')
post_save.connect(send_product_alerts, sender=StockRecord)
|
from django.conf import settings
from django.db.models import get_model
from django.db.models.signals import post_save
from django.contrib.auth.models import User
def send_product_alerts(sender, instance, created, **kwargs):
from oscar.apps.customer.alerts import utils
utils.send_product_alerts(instance.product)
def migrate_alerts_to_user(sender, instance, created, **kwargs):
"""
Transfer any active alerts linked to a user's email address to the newly
registered user.
"""
if created:
ProductAlert = get_model('customer', 'ProductAlert')
alerts = ProductAlert.objects.filter(email=instance.email, status=ProductAlert.ACTIVE)
alerts.update(user=instance, key=None, email=None)
post_save.connect(migrate_alerts_to_user, sender=User)
if settings.OSCAR_EAGER_ALERTS:
StockRecord = get_model('partner', 'StockRecord')
post_save.connect(send_product_alerts, sender=StockRecord)
Adjust post-user-create receiver to check database state
Need to avoid situation where this signal gets raised by syncdb and the
database isn't in the correct state.
Fixes #475from django.conf import settings
from django.db.models import get_model
from django.db.models.signals import post_save
from django.db import connection
from django.contrib.auth.models import User
def send_product_alerts(sender, instance, created, **kwargs):
from oscar.apps.customer.alerts import utils
utils.send_product_alerts(instance.product)
def migrate_alerts_to_user(sender, instance, created, **kwargs):
"""
Transfer any active alerts linked to a user's email address to the newly
registered user.
"""
if not created:
return
ProductAlert = get_model('customer', 'ProductAlert')
# This signal will be raised when creating a superuser as part of syncdb,
# at which point only a subset of tables will be created. Thus, we test if
# the alert table exists before trying to exercise the ORM.
table = ProductAlert._meta.db_table
if table in connection.introspection.table_names():
alerts = ProductAlert.objects.filter(
email=instance.email, status=ProductAlert.ACTIVE)
alerts.update(user=instance, key=None, email=None)
post_save.connect(migrate_alerts_to_user, sender=User)
if settings.OSCAR_EAGER_ALERTS:
StockRecord = get_model('partner', 'StockRecord')
post_save.connect(send_product_alerts, sender=StockRecord)
|
<commit_before>from django.conf import settings
from django.db.models import get_model
from django.db.models.signals import post_save
from django.contrib.auth.models import User
def send_product_alerts(sender, instance, created, **kwargs):
from oscar.apps.customer.alerts import utils
utils.send_product_alerts(instance.product)
def migrate_alerts_to_user(sender, instance, created, **kwargs):
"""
Transfer any active alerts linked to a user's email address to the newly
registered user.
"""
if created:
ProductAlert = get_model('customer', 'ProductAlert')
alerts = ProductAlert.objects.filter(email=instance.email, status=ProductAlert.ACTIVE)
alerts.update(user=instance, key=None, email=None)
post_save.connect(migrate_alerts_to_user, sender=User)
if settings.OSCAR_EAGER_ALERTS:
StockRecord = get_model('partner', 'StockRecord')
post_save.connect(send_product_alerts, sender=StockRecord)
<commit_msg>Adjust post-user-create receiver to check database state
Need to avoid situation where this signal gets raised by syncdb and the
database isn't in the correct state.
Fixes #475<commit_after>from django.conf import settings
from django.db.models import get_model
from django.db.models.signals import post_save
from django.db import connection
from django.contrib.auth.models import User
def send_product_alerts(sender, instance, created, **kwargs):
from oscar.apps.customer.alerts import utils
utils.send_product_alerts(instance.product)
def migrate_alerts_to_user(sender, instance, created, **kwargs):
"""
Transfer any active alerts linked to a user's email address to the newly
registered user.
"""
if not created:
return
ProductAlert = get_model('customer', 'ProductAlert')
# This signal will be raised when creating a superuser as part of syncdb,
# at which point only a subset of tables will be created. Thus, we test if
# the alert table exists before trying to exercise the ORM.
table = ProductAlert._meta.db_table
if table in connection.introspection.table_names():
alerts = ProductAlert.objects.filter(
email=instance.email, status=ProductAlert.ACTIVE)
alerts.update(user=instance, key=None, email=None)
post_save.connect(migrate_alerts_to_user, sender=User)
if settings.OSCAR_EAGER_ALERTS:
StockRecord = get_model('partner', 'StockRecord')
post_save.connect(send_product_alerts, sender=StockRecord)
|
a565235303e1f2572ed34490e25c7e0f31aba74c
|
turngeneration/serializers.py
|
turngeneration/serializers.py
|
from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_attribute(self, obj):
return obj
class RealmSerializer(serializers.Serializer):
content_type = ContentTypeField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
|
from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, value):
return u'{value.app_label}.{value.model}'.format(value=value)
def to_internal_value(self, data):
app_label, model = data.split('.')
return ContentType.objects.get_by_natural_key(app_label, model)
class ReadOnlyDefault(object):
def set_context(self, serializer_field):
self.current_value = getattr(serializer_field.parent.instance,
serializer_field.name, None)
def __call__(self):
return self.current_value
def __repr__(self):
return '%s()' % (self.__class__.__name__,)
class GeneratorSerializer(serializers.ModelSerializer):
content_type = ContentTypeField(read_only=True, default=ReadOnlyDefault())
object_id = serializers.IntegerField(read_only=True,
default=ReadOnlyDefault())
class Meta(object):
model = models.Generator
fields = ('content_type', 'object_id',
'generating', 'generation_time', 'autogenerate',
'allow_pauses', 'minimum_between_generations')
read_only_fields = ('generating', 'generation_time')
class RealmSerializer(serializers.Serializer):
content_type = serializers.SerializerMethodField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
generator = serializers.SerializerMethodField(required=False)
def get_content_type(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_generator(self, obj):
ct = ContentType.objects.get_for_model(obj)
try:
generator = models.Generator.objects.get(
content_type=ct, object_id=obj.pk)
except models.Generator.DoesNotExist:
return None
return GeneratorSerializer(generator).data
|
Support nested generator inside the realm.
|
Support nested generator inside the realm.
|
Python
|
mit
|
jbradberry/django-turn-generation,jbradberry/django-turn-generation
|
from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_attribute(self, obj):
return obj
class RealmSerializer(serializers.Serializer):
content_type = ContentTypeField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
Support nested generator inside the realm.
|
from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, value):
return u'{value.app_label}.{value.model}'.format(value=value)
def to_internal_value(self, data):
app_label, model = data.split('.')
return ContentType.objects.get_by_natural_key(app_label, model)
class ReadOnlyDefault(object):
def set_context(self, serializer_field):
self.current_value = getattr(serializer_field.parent.instance,
serializer_field.name, None)
def __call__(self):
return self.current_value
def __repr__(self):
return '%s()' % (self.__class__.__name__,)
class GeneratorSerializer(serializers.ModelSerializer):
content_type = ContentTypeField(read_only=True, default=ReadOnlyDefault())
object_id = serializers.IntegerField(read_only=True,
default=ReadOnlyDefault())
class Meta(object):
model = models.Generator
fields = ('content_type', 'object_id',
'generating', 'generation_time', 'autogenerate',
'allow_pauses', 'minimum_between_generations')
read_only_fields = ('generating', 'generation_time')
class RealmSerializer(serializers.Serializer):
content_type = serializers.SerializerMethodField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
generator = serializers.SerializerMethodField(required=False)
def get_content_type(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_generator(self, obj):
ct = ContentType.objects.get_for_model(obj)
try:
generator = models.Generator.objects.get(
content_type=ct, object_id=obj.pk)
except models.Generator.DoesNotExist:
return None
return GeneratorSerializer(generator).data
|
<commit_before>from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_attribute(self, obj):
return obj
class RealmSerializer(serializers.Serializer):
content_type = ContentTypeField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
<commit_msg>Support nested generator inside the realm.<commit_after>
|
from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, value):
return u'{value.app_label}.{value.model}'.format(value=value)
def to_internal_value(self, data):
app_label, model = data.split('.')
return ContentType.objects.get_by_natural_key(app_label, model)
class ReadOnlyDefault(object):
def set_context(self, serializer_field):
self.current_value = getattr(serializer_field.parent.instance,
serializer_field.name, None)
def __call__(self):
return self.current_value
def __repr__(self):
return '%s()' % (self.__class__.__name__,)
class GeneratorSerializer(serializers.ModelSerializer):
content_type = ContentTypeField(read_only=True, default=ReadOnlyDefault())
object_id = serializers.IntegerField(read_only=True,
default=ReadOnlyDefault())
class Meta(object):
model = models.Generator
fields = ('content_type', 'object_id',
'generating', 'generation_time', 'autogenerate',
'allow_pauses', 'minimum_between_generations')
read_only_fields = ('generating', 'generation_time')
class RealmSerializer(serializers.Serializer):
content_type = serializers.SerializerMethodField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
generator = serializers.SerializerMethodField(required=False)
def get_content_type(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_generator(self, obj):
ct = ContentType.objects.get_for_model(obj)
try:
generator = models.Generator.objects.get(
content_type=ct, object_id=obj.pk)
except models.Generator.DoesNotExist:
return None
return GeneratorSerializer(generator).data
|
from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_attribute(self, obj):
return obj
class RealmSerializer(serializers.Serializer):
content_type = ContentTypeField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
Support nested generator inside the realm.from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, value):
return u'{value.app_label}.{value.model}'.format(value=value)
def to_internal_value(self, data):
app_label, model = data.split('.')
return ContentType.objects.get_by_natural_key(app_label, model)
class ReadOnlyDefault(object):
def set_context(self, serializer_field):
self.current_value = getattr(serializer_field.parent.instance,
serializer_field.name, None)
def __call__(self):
return self.current_value
def __repr__(self):
return '%s()' % (self.__class__.__name__,)
class GeneratorSerializer(serializers.ModelSerializer):
content_type = ContentTypeField(read_only=True, default=ReadOnlyDefault())
object_id = serializers.IntegerField(read_only=True,
default=ReadOnlyDefault())
class Meta(object):
model = models.Generator
fields = ('content_type', 'object_id',
'generating', 'generation_time', 'autogenerate',
'allow_pauses', 'minimum_between_generations')
read_only_fields = ('generating', 'generation_time')
class RealmSerializer(serializers.Serializer):
content_type = serializers.SerializerMethodField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
generator = serializers.SerializerMethodField(required=False)
def get_content_type(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_generator(self, obj):
ct = ContentType.objects.get_for_model(obj)
try:
generator = models.Generator.objects.get(
content_type=ct, object_id=obj.pk)
except models.Generator.DoesNotExist:
return None
return GeneratorSerializer(generator).data
|
<commit_before>from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_attribute(self, obj):
return obj
class RealmSerializer(serializers.Serializer):
content_type = ContentTypeField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
<commit_msg>Support nested generator inside the realm.<commit_after>from django.contrib.contenttypes.models import ContentType
from rest_framework import serializers
from . import models
class ContentTypeField(serializers.Field):
def to_representation(self, value):
return u'{value.app_label}.{value.model}'.format(value=value)
def to_internal_value(self, data):
app_label, model = data.split('.')
return ContentType.objects.get_by_natural_key(app_label, model)
class ReadOnlyDefault(object):
def set_context(self, serializer_field):
self.current_value = getattr(serializer_field.parent.instance,
serializer_field.name, None)
def __call__(self):
return self.current_value
def __repr__(self):
return '%s()' % (self.__class__.__name__,)
class GeneratorSerializer(serializers.ModelSerializer):
content_type = ContentTypeField(read_only=True, default=ReadOnlyDefault())
object_id = serializers.IntegerField(read_only=True,
default=ReadOnlyDefault())
class Meta(object):
model = models.Generator
fields = ('content_type', 'object_id',
'generating', 'generation_time', 'autogenerate',
'allow_pauses', 'minimum_between_generations')
read_only_fields = ('generating', 'generation_time')
class RealmSerializer(serializers.Serializer):
content_type = serializers.SerializerMethodField()
object_id = serializers.IntegerField(source='pk')
repr = serializers.CharField(source='__repr__')
generator = serializers.SerializerMethodField(required=False)
def get_content_type(self, obj):
ct = ContentType.objects.get_for_model(obj)
return u'{ct.app_label}.{ct.model}'.format(ct=ct)
def get_generator(self, obj):
ct = ContentType.objects.get_for_model(obj)
try:
generator = models.Generator.objects.get(
content_type=ct, object_id=obj.pk)
except models.Generator.DoesNotExist:
return None
return GeneratorSerializer(generator).data
|
1f75d6b1d13814207c5585da166e59f3d67af4c1
|
stickord/commands/xkcd.py
|
stickord/commands/xkcd.py
|
'''
Provides commands to the xkcd system
'''
from stickord.helpers.xkcd_api import get_random, get_by_id, print_comic, get_recent
from stickord.registry import Command
@Command('xkcd', category='xkcd')
async def get_comic(cont, _mesg):
''' Search for a comic by id, if no id is provided it will post a random comic. '''
if cont:
comic_id = int(cont[0])
comic = await get_by_id(comic_id)
return await print_comic(comic)
comic = await get_random()
return await print_comic(comic)
@Command('newxkcd', category='xkcd')
async def get_latest_comic(_cont, _mesg):
''' Posts the latest xkcd comic. '''
comic = await get_recent()
return await print_comic(comic)
|
'''
Provides commands to the xkcd system
'''
from stickord.helpers.xkcd_api import get_random, get_by_id, print_comic, get_recent
from stickord.registry import Command
@Command('xkcd', category='xkcd')
async def get_comic(cont, _mesg):
''' Search for a comic by id, if no id is provided it will post a random comic. '''
if cont:
try:
comic_id = int(cont[0])
comic = await get_by_id(comic_id)
return await print_comic(comic)
except ValueError:
pass
comic = await get_random()
return await print_comic(comic)
@Command('newxkcd', category='xkcd')
async def get_latest_comic(_cont, _mesg):
''' Posts the latest xkcd comic. '''
comic = await get_recent()
return await print_comic(comic)
|
Fix crash on invalid int
|
Fix crash on invalid int
|
Python
|
mit
|
RobinSikkens/Sticky-discord
|
'''
Provides commands to the xkcd system
'''
from stickord.helpers.xkcd_api import get_random, get_by_id, print_comic, get_recent
from stickord.registry import Command
@Command('xkcd', category='xkcd')
async def get_comic(cont, _mesg):
''' Search for a comic by id, if no id is provided it will post a random comic. '''
if cont:
comic_id = int(cont[0])
comic = await get_by_id(comic_id)
return await print_comic(comic)
comic = await get_random()
return await print_comic(comic)
@Command('newxkcd', category='xkcd')
async def get_latest_comic(_cont, _mesg):
''' Posts the latest xkcd comic. '''
comic = await get_recent()
return await print_comic(comic)
Fix crash on invalid int
|
'''
Provides commands to the xkcd system
'''
from stickord.helpers.xkcd_api import get_random, get_by_id, print_comic, get_recent
from stickord.registry import Command
@Command('xkcd', category='xkcd')
async def get_comic(cont, _mesg):
''' Search for a comic by id, if no id is provided it will post a random comic. '''
if cont:
try:
comic_id = int(cont[0])
comic = await get_by_id(comic_id)
return await print_comic(comic)
except ValueError:
pass
comic = await get_random()
return await print_comic(comic)
@Command('newxkcd', category='xkcd')
async def get_latest_comic(_cont, _mesg):
''' Posts the latest xkcd comic. '''
comic = await get_recent()
return await print_comic(comic)
|
<commit_before>'''
Provides commands to the xkcd system
'''
from stickord.helpers.xkcd_api import get_random, get_by_id, print_comic, get_recent
from stickord.registry import Command
@Command('xkcd', category='xkcd')
async def get_comic(cont, _mesg):
''' Search for a comic by id, if no id is provided it will post a random comic. '''
if cont:
comic_id = int(cont[0])
comic = await get_by_id(comic_id)
return await print_comic(comic)
comic = await get_random()
return await print_comic(comic)
@Command('newxkcd', category='xkcd')
async def get_latest_comic(_cont, _mesg):
''' Posts the latest xkcd comic. '''
comic = await get_recent()
return await print_comic(comic)
<commit_msg>Fix crash on invalid int<commit_after>
|
'''
Provides commands to the xkcd system
'''
from stickord.helpers.xkcd_api import get_random, get_by_id, print_comic, get_recent
from stickord.registry import Command
@Command('xkcd', category='xkcd')
async def get_comic(cont, _mesg):
''' Search for a comic by id, if no id is provided it will post a random comic. '''
if cont:
try:
comic_id = int(cont[0])
comic = await get_by_id(comic_id)
return await print_comic(comic)
except ValueError:
pass
comic = await get_random()
return await print_comic(comic)
@Command('newxkcd', category='xkcd')
async def get_latest_comic(_cont, _mesg):
''' Posts the latest xkcd comic. '''
comic = await get_recent()
return await print_comic(comic)
|
'''
Provides commands to the xkcd system
'''
from stickord.helpers.xkcd_api import get_random, get_by_id, print_comic, get_recent
from stickord.registry import Command
@Command('xkcd', category='xkcd')
async def get_comic(cont, _mesg):
''' Search for a comic by id, if no id is provided it will post a random comic. '''
if cont:
comic_id = int(cont[0])
comic = await get_by_id(comic_id)
return await print_comic(comic)
comic = await get_random()
return await print_comic(comic)
@Command('newxkcd', category='xkcd')
async def get_latest_comic(_cont, _mesg):
''' Posts the latest xkcd comic. '''
comic = await get_recent()
return await print_comic(comic)
Fix crash on invalid int'''
Provides commands to the xkcd system
'''
from stickord.helpers.xkcd_api import get_random, get_by_id, print_comic, get_recent
from stickord.registry import Command
@Command('xkcd', category='xkcd')
async def get_comic(cont, _mesg):
''' Search for a comic by id, if no id is provided it will post a random comic. '''
if cont:
try:
comic_id = int(cont[0])
comic = await get_by_id(comic_id)
return await print_comic(comic)
except ValueError:
pass
comic = await get_random()
return await print_comic(comic)
@Command('newxkcd', category='xkcd')
async def get_latest_comic(_cont, _mesg):
''' Posts the latest xkcd comic. '''
comic = await get_recent()
return await print_comic(comic)
|
<commit_before>'''
Provides commands to the xkcd system
'''
from stickord.helpers.xkcd_api import get_random, get_by_id, print_comic, get_recent
from stickord.registry import Command
@Command('xkcd', category='xkcd')
async def get_comic(cont, _mesg):
''' Search for a comic by id, if no id is provided it will post a random comic. '''
if cont:
comic_id = int(cont[0])
comic = await get_by_id(comic_id)
return await print_comic(comic)
comic = await get_random()
return await print_comic(comic)
@Command('newxkcd', category='xkcd')
async def get_latest_comic(_cont, _mesg):
''' Posts the latest xkcd comic. '''
comic = await get_recent()
return await print_comic(comic)
<commit_msg>Fix crash on invalid int<commit_after>'''
Provides commands to the xkcd system
'''
from stickord.helpers.xkcd_api import get_random, get_by_id, print_comic, get_recent
from stickord.registry import Command
@Command('xkcd', category='xkcd')
async def get_comic(cont, _mesg):
''' Search for a comic by id, if no id is provided it will post a random comic. '''
if cont:
try:
comic_id = int(cont[0])
comic = await get_by_id(comic_id)
return await print_comic(comic)
except ValueError:
pass
comic = await get_random()
return await print_comic(comic)
@Command('newxkcd', category='xkcd')
async def get_latest_comic(_cont, _mesg):
''' Posts the latest xkcd comic. '''
comic = await get_recent()
return await print_comic(comic)
|
936a8b77625f881f39f2433fad126f1b5d73fa3f
|
commands.py
|
commands.py
|
from runcommands import command
from runcommands.commands import local as _local
@command
def format_code(check=False):
_local(f"black . {'--check' if check else ''}")
@command
def lint():
_local("flake8 .")
@command
def test(with_coverage=True, check=True):
if with_coverage:
_local(
"coverage run --source local_settings -m unittest discover "
"&& coverage report"
)
else:
_local("python -m unittest discover")
if check:
format_code(check=True)
lint()
@command
def tox(clean=False):
_local(f"tox {'-r' if clean else ''}")
|
from runcommands import command
from runcommands.commands import local as _local
@command
def format_code(check=False):
_local(f"black . {'--check' if check else ''}")
@command
def lint():
_local("flake8 .")
@command
def test(with_coverage=True, check=True):
if with_coverage:
_local(
"coverage run "
"--source src/local_settings "
"-m unittest discover "
"-t . -s tests "
"&& coverage report"
)
else:
_local("python -m unittest discover -t . -s tests")
if check:
format_code(check=True)
lint()
@command
def tox(clean=False):
_local(f"tox {'-r' if clean else ''}")
|
Fix test command, esp. wrt. coverage
|
Fix test command, esp. wrt. coverage
|
Python
|
mit
|
wylee/django-local-settings
|
from runcommands import command
from runcommands.commands import local as _local
@command
def format_code(check=False):
_local(f"black . {'--check' if check else ''}")
@command
def lint():
_local("flake8 .")
@command
def test(with_coverage=True, check=True):
if with_coverage:
_local(
"coverage run --source local_settings -m unittest discover "
"&& coverage report"
)
else:
_local("python -m unittest discover")
if check:
format_code(check=True)
lint()
@command
def tox(clean=False):
_local(f"tox {'-r' if clean else ''}")
Fix test command, esp. wrt. coverage
|
from runcommands import command
from runcommands.commands import local as _local
@command
def format_code(check=False):
_local(f"black . {'--check' if check else ''}")
@command
def lint():
_local("flake8 .")
@command
def test(with_coverage=True, check=True):
if with_coverage:
_local(
"coverage run "
"--source src/local_settings "
"-m unittest discover "
"-t . -s tests "
"&& coverage report"
)
else:
_local("python -m unittest discover -t . -s tests")
if check:
format_code(check=True)
lint()
@command
def tox(clean=False):
_local(f"tox {'-r' if clean else ''}")
|
<commit_before>from runcommands import command
from runcommands.commands import local as _local
@command
def format_code(check=False):
_local(f"black . {'--check' if check else ''}")
@command
def lint():
_local("flake8 .")
@command
def test(with_coverage=True, check=True):
if with_coverage:
_local(
"coverage run --source local_settings -m unittest discover "
"&& coverage report"
)
else:
_local("python -m unittest discover")
if check:
format_code(check=True)
lint()
@command
def tox(clean=False):
_local(f"tox {'-r' if clean else ''}")
<commit_msg>Fix test command, esp. wrt. coverage<commit_after>
|
from runcommands import command
from runcommands.commands import local as _local
@command
def format_code(check=False):
_local(f"black . {'--check' if check else ''}")
@command
def lint():
_local("flake8 .")
@command
def test(with_coverage=True, check=True):
if with_coverage:
_local(
"coverage run "
"--source src/local_settings "
"-m unittest discover "
"-t . -s tests "
"&& coverage report"
)
else:
_local("python -m unittest discover -t . -s tests")
if check:
format_code(check=True)
lint()
@command
def tox(clean=False):
_local(f"tox {'-r' if clean else ''}")
|
from runcommands import command
from runcommands.commands import local as _local
@command
def format_code(check=False):
_local(f"black . {'--check' if check else ''}")
@command
def lint():
_local("flake8 .")
@command
def test(with_coverage=True, check=True):
if with_coverage:
_local(
"coverage run --source local_settings -m unittest discover "
"&& coverage report"
)
else:
_local("python -m unittest discover")
if check:
format_code(check=True)
lint()
@command
def tox(clean=False):
_local(f"tox {'-r' if clean else ''}")
Fix test command, esp. wrt. coveragefrom runcommands import command
from runcommands.commands import local as _local
@command
def format_code(check=False):
_local(f"black . {'--check' if check else ''}")
@command
def lint():
_local("flake8 .")
@command
def test(with_coverage=True, check=True):
if with_coverage:
_local(
"coverage run "
"--source src/local_settings "
"-m unittest discover "
"-t . -s tests "
"&& coverage report"
)
else:
_local("python -m unittest discover -t . -s tests")
if check:
format_code(check=True)
lint()
@command
def tox(clean=False):
_local(f"tox {'-r' if clean else ''}")
|
<commit_before>from runcommands import command
from runcommands.commands import local as _local
@command
def format_code(check=False):
_local(f"black . {'--check' if check else ''}")
@command
def lint():
_local("flake8 .")
@command
def test(with_coverage=True, check=True):
if with_coverage:
_local(
"coverage run --source local_settings -m unittest discover "
"&& coverage report"
)
else:
_local("python -m unittest discover")
if check:
format_code(check=True)
lint()
@command
def tox(clean=False):
_local(f"tox {'-r' if clean else ''}")
<commit_msg>Fix test command, esp. wrt. coverage<commit_after>from runcommands import command
from runcommands.commands import local as _local
@command
def format_code(check=False):
_local(f"black . {'--check' if check else ''}")
@command
def lint():
_local("flake8 .")
@command
def test(with_coverage=True, check=True):
if with_coverage:
_local(
"coverage run "
"--source src/local_settings "
"-m unittest discover "
"-t . -s tests "
"&& coverage report"
)
else:
_local("python -m unittest discover -t . -s tests")
if check:
format_code(check=True)
lint()
@command
def tox(clean=False):
_local(f"tox {'-r' if clean else ''}")
|
4d915a5a9056c17fe66f41a839a4ad8e3c0bffaa
|
test/conftest.py
|
test/conftest.py
|
import pytest
import station
@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
if request.param == 'local':
return None
if request.param == 'spark':
station.setup(spark=True)
return station.engine()
@pytest.fixture(scope='module')
def engspark():
station.setup(spark=True)
return station.engine()
|
import pytest
import station
@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
if request.param == 'local':
return None
if request.param == 'spark':
station.start(spark=True)
return station.engine()
@pytest.fixture(scope='module')
def engspark():
station.start(spark=True)
return station.engine()
|
Fix for new version of station
|
Fix for new version of station
|
Python
|
apache-2.0
|
j-friedrich/thunder,j-friedrich/thunder,thunder-project/thunder,jwittenbach/thunder
|
import pytest
import station
@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
if request.param == 'local':
return None
if request.param == 'spark':
station.setup(spark=True)
return station.engine()
@pytest.fixture(scope='module')
def engspark():
station.setup(spark=True)
return station.engine()
Fix for new version of station
|
import pytest
import station
@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
if request.param == 'local':
return None
if request.param == 'spark':
station.start(spark=True)
return station.engine()
@pytest.fixture(scope='module')
def engspark():
station.start(spark=True)
return station.engine()
|
<commit_before>import pytest
import station
@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
if request.param == 'local':
return None
if request.param == 'spark':
station.setup(spark=True)
return station.engine()
@pytest.fixture(scope='module')
def engspark():
station.setup(spark=True)
return station.engine()
<commit_msg>Fix for new version of station<commit_after>
|
import pytest
import station
@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
if request.param == 'local':
return None
if request.param == 'spark':
station.start(spark=True)
return station.engine()
@pytest.fixture(scope='module')
def engspark():
station.start(spark=True)
return station.engine()
|
import pytest
import station
@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
if request.param == 'local':
return None
if request.param == 'spark':
station.setup(spark=True)
return station.engine()
@pytest.fixture(scope='module')
def engspark():
station.setup(spark=True)
return station.engine()
Fix for new version of stationimport pytest
import station
@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
if request.param == 'local':
return None
if request.param == 'spark':
station.start(spark=True)
return station.engine()
@pytest.fixture(scope='module')
def engspark():
station.start(spark=True)
return station.engine()
|
<commit_before>import pytest
import station
@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
if request.param == 'local':
return None
if request.param == 'spark':
station.setup(spark=True)
return station.engine()
@pytest.fixture(scope='module')
def engspark():
station.setup(spark=True)
return station.engine()
<commit_msg>Fix for new version of station<commit_after>import pytest
import station
@pytest.fixture(scope='module', params=['local', 'spark'])
def eng(request):
if request.param == 'local':
return None
if request.param == 'spark':
station.start(spark=True)
return station.engine()
@pytest.fixture(scope='module')
def engspark():
station.start(spark=True)
return station.engine()
|
a91ae078952471377505ffc09b58e8391d3b7713
|
extensions/ExtGameController.py
|
extensions/ExtGameController.py
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + game_modes
)
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
|
Update extensions and GameController subclass
|
Update extensions and GameController subclass
|
Python
|
apache-2.0
|
dsandersAzure/python_cowbull_server,dsandersAzure/python_cowbull_server
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + game_modes
)
Update extensions and GameController subclass
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
|
<commit_before>from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + game_modes
)
<commit_msg>Update extensions and GameController subclass<commit_after>
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
|
from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + game_modes
)
Update extensions and GameController subclassfrom python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
|
<commit_before>from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + game_modes
)
<commit_msg>Update extensions and GameController subclass<commit_after>from python_cowbull_game.GameController import GameController
from python_cowbull_game.GameMode import GameMode
class ExtGameController(GameController):
additional_modes = [
GameMode(mode="SuperTough", priority=6, digits=10, digit_type=0),
GameMode(mode="hexTough", priority=5, digits=3, guesses_allowed=3, digit_type=1)
]
def __init__(self, game_modes=None, mode=None, game_json=None):
super(ExtGameController, self).__init__(
game_json=game_json,
mode=mode,
game_modes=self.additional_modes + (game_modes or [])
)
|
d1be59a87fce8e20d698c4d1f6a272c21834a1c3
|
providers/popularity/kickasstorrents.py
|
providers/popularity/kickasstorrents.py
|
from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 1
def get_popular(self):
names = []
for page in range(Provider.PAGES_TO_FETCH):
url = "https://kat.cr/usearch/category%%3Ahighres-movies/%s/" % page
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
|
from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 3
def get_popular(self):
names = []
base = "https://kickasstorrents.to/highres-movies/"
# New mirrors can be found at https://thekickasstorrents.com/
for page in range(Provider.PAGES_TO_FETCH):
if page == 0:
url = base
else:
url = base + "%s/" % (page + 1)
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
|
Fix Kickasstorrents by using one of many mirrors.
|
Fix Kickasstorrents by using one of many mirrors.
|
Python
|
mit
|
EmilStenstrom/nephele
|
from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 1
def get_popular(self):
names = []
for page in range(Provider.PAGES_TO_FETCH):
url = "https://kat.cr/usearch/category%%3Ahighres-movies/%s/" % page
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
Fix Kickasstorrents by using one of many mirrors.
|
from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 3
def get_popular(self):
names = []
base = "https://kickasstorrents.to/highres-movies/"
# New mirrors can be found at https://thekickasstorrents.com/
for page in range(Provider.PAGES_TO_FETCH):
if page == 0:
url = base
else:
url = base + "%s/" % (page + 1)
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
|
<commit_before>from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 1
def get_popular(self):
names = []
for page in range(Provider.PAGES_TO_FETCH):
url = "https://kat.cr/usearch/category%%3Ahighres-movies/%s/" % page
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
<commit_msg>Fix Kickasstorrents by using one of many mirrors.<commit_after>
|
from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 3
def get_popular(self):
names = []
base = "https://kickasstorrents.to/highres-movies/"
# New mirrors can be found at https://thekickasstorrents.com/
for page in range(Provider.PAGES_TO_FETCH):
if page == 0:
url = base
else:
url = base + "%s/" % (page + 1)
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
|
from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 1
def get_popular(self):
names = []
for page in range(Provider.PAGES_TO_FETCH):
url = "https://kat.cr/usearch/category%%3Ahighres-movies/%s/" % page
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
Fix Kickasstorrents by using one of many mirrors.from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 3
def get_popular(self):
names = []
base = "https://kickasstorrents.to/highres-movies/"
# New mirrors can be found at https://thekickasstorrents.com/
for page in range(Provider.PAGES_TO_FETCH):
if page == 0:
url = base
else:
url = base + "%s/" % (page + 1)
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
|
<commit_before>from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 1
def get_popular(self):
names = []
for page in range(Provider.PAGES_TO_FETCH):
url = "https://kat.cr/usearch/category%%3Ahighres-movies/%s/" % page
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
<commit_msg>Fix Kickasstorrents by using one of many mirrors.<commit_after>from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 3
def get_popular(self):
names = []
base = "https://kickasstorrents.to/highres-movies/"
# New mirrors can be found at https://thekickasstorrents.com/
for page in range(Provider.PAGES_TO_FETCH):
if page == 0:
url = base
else:
url = base + "%s/" % (page + 1)
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
|
aff229797b3914b6708920fd247861d7777ccc22
|
framework/tasks/__init__.py
|
framework/tasks/__init__.py
|
# -*- coding: utf-8 -*-
"""Asynchronous task queue module."""
from celery import Celery
from celery.utils.log import get_task_logger
from kombu import Exchange, Queue
from raven import Client
from raven.contrib.celery import register_signal
from website import settings
app = Celery()
# TODO: Hardcoded settings module. Should be set using framework's config handler
app.config_from_object('website.settings')
app.conf.CELERY_QUEUES = (
Queue(
settings.CELERY_DEFAULT_QUEUE,
Exchange(settings.CELERY_DEFAULT_QUEUE),
routing_key=settings.CELERY_DEFAULT_QUEUE),
)
if settings.SENTRY_DSN:
client = Client(settings.SENTRY_DSN)
register_signal(client)
@app.task
def error_handler(task_id, task_name):
"""logs detailed message about tasks that raise exceptions
:param task_id: TaskID of the failed task
:param task_name: name of task that failed
"""
# get the current logger
logger = get_task_logger(__name__)
# query the broker for the AsyncResult
result = app.AsyncResult(task_id)
excep = result.get(propagate=False)
# log detailed error mesage in error log
logger.error('#####FAILURE LOG BEGIN#####\n'
'Task {0} raised exception: {0}\n\{0}\n'
'#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
|
# -*- coding: utf-8 -*-
"""Asynchronous task queue module."""
from celery import Celery
from celery.utils.log import get_task_logger
from raven import Client
from raven.contrib.celery import register_signal
from website import settings
app = Celery()
# TODO: Hardcoded settings module. Should be set using framework's config handler
app.config_from_object('website.settings')
if settings.SENTRY_DSN:
client = Client(settings.SENTRY_DSN)
register_signal(client)
@app.task
def error_handler(task_id, task_name):
"""logs detailed message about tasks that raise exceptions
:param task_id: TaskID of the failed task
:param task_name: name of task that failed
"""
# get the current logger
logger = get_task_logger(__name__)
# query the broker for the AsyncResult
result = app.AsyncResult(task_id)
excep = result.get(propagate=False)
# log detailed error mesage in error log
logger.error('#####FAILURE LOG BEGIN#####\n'
'Task {0} raised exception: {0}\n\{0}\n'
'#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
|
Use auto generated celery queues
|
Use auto generated celery queues
|
Python
|
apache-2.0
|
jmcarp/osf.io,MerlinZhang/osf.io,Nesiehr/osf.io,leb2dg/osf.io,emetsger/osf.io,GageGaskins/osf.io,ticklemepierce/osf.io,jinluyuan/osf.io,zamattiac/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,kch8qx/osf.io,KAsante95/osf.io,crcresearch/osf.io,aaxelb/osf.io,zamattiac/osf.io,zamattiac/osf.io,cwisecarver/osf.io,fabianvf/osf.io,lyndsysimon/osf.io,acshi/osf.io,saradbowman/osf.io,amyshi188/osf.io,MerlinZhang/osf.io,jmcarp/osf.io,cwisecarver/osf.io,Nesiehr/osf.io,RomanZWang/osf.io,kch8qx/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,GageGaskins/osf.io,icereval/osf.io,rdhyee/osf.io,chennan47/osf.io,njantrania/osf.io,lyndsysimon/osf.io,hmoco/osf.io,kwierman/osf.io,laurenrevere/osf.io,kwierman/osf.io,caseyrollins/osf.io,ckc6cz/osf.io,brianjgeiger/osf.io,abought/osf.io,RomanZWang/osf.io,dplorimer/osf,ZobairAlijan/osf.io,jeffreyliu3230/osf.io,monikagrabowska/osf.io,wearpants/osf.io,zachjanicki/osf.io,ZobairAlijan/osf.io,jeffreyliu3230/osf.io,mluke93/osf.io,samanehsan/osf.io,GageGaskins/osf.io,crcresearch/osf.io,haoyuchen1992/osf.io,mfraezz/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,acshi/osf.io,chrisseto/osf.io,jolene-esposito/osf.io,caneruguz/osf.io,crcresearch/osf.io,jnayak1/osf.io,abought/osf.io,pattisdr/osf.io,pattisdr/osf.io,kwierman/osf.io,cslzchen/osf.io,caseyrygt/osf.io,mfraezz/osf.io,doublebits/osf.io,doublebits/osf.io,jnayak1/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,sbt9uc/osf.io,hmoco/osf.io,caneruguz/osf.io,wearpants/osf.io,felliott/osf.io,jnayak1/osf.io,asanfilippo7/osf.io,mluke93/osf.io,laurenrevere/osf.io,reinaH/osf.io,brandonPurvis/osf.io,adlius/osf.io,jmcarp/osf.io,jmcarp/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,haoyuchen1992/osf.io,KAsante95/osf.io,amyshi188/osf.io,KAsante95/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,baylee-d/osf.io,arpitar/osf.io,amyshi188/osf.io,chrisseto/osf.io,KAsante95/osf.io,alexschiller/osf.io,billyhunt/osf.io,mluo613/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,jeffrey
liu3230/osf.io,TomBaxter/osf.io,kch8qx/osf.io,mfraezz/osf.io,emetsger/osf.io,lyndsysimon/osf.io,icereval/osf.io,HarryRybacki/osf.io,aaxelb/osf.io,leb2dg/osf.io,jnayak1/osf.io,rdhyee/osf.io,wearpants/osf.io,reinaH/osf.io,TomBaxter/osf.io,DanielSBrown/osf.io,TomHeatwole/osf.io,arpitar/osf.io,sloria/osf.io,njantrania/osf.io,hmoco/osf.io,HarryRybacki/osf.io,cosenal/osf.io,Ghalko/osf.io,HarryRybacki/osf.io,mluo613/osf.io,jolene-esposito/osf.io,adlius/osf.io,Ghalko/osf.io,asanfilippo7/osf.io,cosenal/osf.io,SSJohns/osf.io,asanfilippo7/osf.io,leb2dg/osf.io,SSJohns/osf.io,mluke93/osf.io,mluke93/osf.io,SSJohns/osf.io,mluo613/osf.io,laurenrevere/osf.io,HarryRybacki/osf.io,abought/osf.io,cslzchen/osf.io,erinspace/osf.io,billyhunt/osf.io,mluo613/osf.io,zachjanicki/osf.io,samanehsan/osf.io,mattclark/osf.io,mfraezz/osf.io,mattclark/osf.io,doublebits/osf.io,rdhyee/osf.io,jinluyuan/osf.io,TomBaxter/osf.io,MerlinZhang/osf.io,bdyetton/prettychart,fabianvf/osf.io,brandonPurvis/osf.io,sloria/osf.io,brandonPurvis/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,acshi/osf.io,zachjanicki/osf.io,DanielSBrown/osf.io,njantrania/osf.io,asanfilippo7/osf.io,danielneis/osf.io,ZobairAlijan/osf.io,arpitar/osf.io,Nesiehr/osf.io,sbt9uc/osf.io,binoculars/osf.io,hmoco/osf.io,fabianvf/osf.io,alexschiller/osf.io,billyhunt/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,Nesiehr/osf.io,bdyetton/prettychart,ZobairAlijan/osf.io,petermalcolm/osf.io,ckc6cz/osf.io,billyhunt/osf.io,caseyrygt/osf.io,dplorimer/osf,ticklemepierce/osf.io,mluo613/osf.io,DanielSBrown/osf.io,mattclark/osf.io,GageGaskins/osf.io,Ghalko/osf.io,chennan47/osf.io,brianjgeiger/osf.io,binoculars/osf.io,dplorimer/osf,haoyuchen1992/osf.io,brandonPurvis/osf.io,Ghalko/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,cldershem/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,cslzchen/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,jolene-esposito/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,felliott/osf.io,ji
nluyuan/osf.io,CenterForOpenScience/osf.io,cldershem/osf.io,jinluyuan/osf.io,baylee-d/osf.io,amyshi188/osf.io,adlius/osf.io,danielneis/osf.io,ckc6cz/osf.io,emetsger/osf.io,icereval/osf.io,chennan47/osf.io,samanehsan/osf.io,zachjanicki/osf.io,binoculars/osf.io,MerlinZhang/osf.io,reinaH/osf.io,doublebits/osf.io,reinaH/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,erinspace/osf.io,RomanZWang/osf.io,rdhyee/osf.io,erinspace/osf.io,sloria/osf.io,caseyrollins/osf.io,jeffreyliu3230/osf.io,abought/osf.io,bdyetton/prettychart,wearpants/osf.io,RomanZWang/osf.io,njantrania/osf.io,fabianvf/osf.io,cosenal/osf.io,GageGaskins/osf.io,lyndsysimon/osf.io,zamattiac/osf.io,danielneis/osf.io,HalcyonChimera/osf.io,jolene-esposito/osf.io,cslzchen/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,cldershem/osf.io,adlius/osf.io,cosenal/osf.io,petermalcolm/osf.io,alexschiller/osf.io,doublebits/osf.io,baylee-d/osf.io,TomHeatwole/osf.io,DanielSBrown/osf.io,dplorimer/osf,ckc6cz/osf.io,bdyetton/prettychart,kwierman/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,saradbowman/osf.io,samchrisinger/osf.io,petermalcolm/osf.io,petermalcolm/osf.io,sbt9uc/osf.io,ticklemepierce/osf.io,cwisecarver/osf.io,cldershem/osf.io,samchrisinger/osf.io,felliott/osf.io,felliott/osf.io,chrisseto/osf.io,samchrisinger/osf.io,danielneis/osf.io,acshi/osf.io,pattisdr/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,emetsger/osf.io,cwisecarver/osf.io,sbt9uc/osf.io,arpitar/osf.io,caseyrollins/osf.io
|
# -*- coding: utf-8 -*-
"""Asynchronous task queue module."""
from celery import Celery
from celery.utils.log import get_task_logger
from kombu import Exchange, Queue
from raven import Client
from raven.contrib.celery import register_signal
from website import settings
app = Celery()
# TODO: Hardcoded settings module. Should be set using framework's config handler
app.config_from_object('website.settings')
app.conf.CELERY_QUEUES = (
Queue(
settings.CELERY_DEFAULT_QUEUE,
Exchange(settings.CELERY_DEFAULT_QUEUE),
routing_key=settings.CELERY_DEFAULT_QUEUE),
)
if settings.SENTRY_DSN:
client = Client(settings.SENTRY_DSN)
register_signal(client)
@app.task
def error_handler(task_id, task_name):
"""logs detailed message about tasks that raise exceptions
:param task_id: TaskID of the failed task
:param task_name: name of task that failed
"""
# get the current logger
logger = get_task_logger(__name__)
# query the broker for the AsyncResult
result = app.AsyncResult(task_id)
excep = result.get(propagate=False)
# log detailed error mesage in error log
logger.error('#####FAILURE LOG BEGIN#####\n'
'Task {0} raised exception: {0}\n\{0}\n'
'#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
Use auto generated celery queues
|
# -*- coding: utf-8 -*-
"""Asynchronous task queue module."""
from celery import Celery
from celery.utils.log import get_task_logger
from raven import Client
from raven.contrib.celery import register_signal
from website import settings
app = Celery()
# TODO: Hardcoded settings module. Should be set using framework's config handler
app.config_from_object('website.settings')
if settings.SENTRY_DSN:
client = Client(settings.SENTRY_DSN)
register_signal(client)
@app.task
def error_handler(task_id, task_name):
"""logs detailed message about tasks that raise exceptions
:param task_id: TaskID of the failed task
:param task_name: name of task that failed
"""
# get the current logger
logger = get_task_logger(__name__)
# query the broker for the AsyncResult
result = app.AsyncResult(task_id)
excep = result.get(propagate=False)
# log detailed error mesage in error log
logger.error('#####FAILURE LOG BEGIN#####\n'
'Task {0} raised exception: {0}\n\{0}\n'
'#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
|
<commit_before># -*- coding: utf-8 -*-
"""Asynchronous task queue module."""
from celery import Celery
from celery.utils.log import get_task_logger
from kombu import Exchange, Queue
from raven import Client
from raven.contrib.celery import register_signal
from website import settings
app = Celery()
# TODO: Hardcoded settings module. Should be set using framework's config handler
app.config_from_object('website.settings')
app.conf.CELERY_QUEUES = (
Queue(
settings.CELERY_DEFAULT_QUEUE,
Exchange(settings.CELERY_DEFAULT_QUEUE),
routing_key=settings.CELERY_DEFAULT_QUEUE),
)
if settings.SENTRY_DSN:
client = Client(settings.SENTRY_DSN)
register_signal(client)
@app.task
def error_handler(task_id, task_name):
"""logs detailed message about tasks that raise exceptions
:param task_id: TaskID of the failed task
:param task_name: name of task that failed
"""
# get the current logger
logger = get_task_logger(__name__)
# query the broker for the AsyncResult
result = app.AsyncResult(task_id)
excep = result.get(propagate=False)
# log detailed error mesage in error log
logger.error('#####FAILURE LOG BEGIN#####\n'
'Task {0} raised exception: {0}\n\{0}\n'
'#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
<commit_msg>Use auto generated celery queues<commit_after>
|
# -*- coding: utf-8 -*-
"""Asynchronous task queue module."""
from celery import Celery
from celery.utils.log import get_task_logger
from raven import Client
from raven.contrib.celery import register_signal
from website import settings
app = Celery()
# TODO: Hardcoded settings module. Should be set using framework's config handler
app.config_from_object('website.settings')
if settings.SENTRY_DSN:
client = Client(settings.SENTRY_DSN)
register_signal(client)
@app.task
def error_handler(task_id, task_name):
"""logs detailed message about tasks that raise exceptions
:param task_id: TaskID of the failed task
:param task_name: name of task that failed
"""
# get the current logger
logger = get_task_logger(__name__)
# query the broker for the AsyncResult
result = app.AsyncResult(task_id)
excep = result.get(propagate=False)
# log detailed error mesage in error log
logger.error('#####FAILURE LOG BEGIN#####\n'
'Task {0} raised exception: {0}\n\{0}\n'
'#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
|
# -*- coding: utf-8 -*-
"""Asynchronous task queue module."""
from celery import Celery
from celery.utils.log import get_task_logger
from kombu import Exchange, Queue
from raven import Client
from raven.contrib.celery import register_signal
from website import settings
app = Celery()
# TODO: Hardcoded settings module. Should be set using framework's config handler
app.config_from_object('website.settings')
app.conf.CELERY_QUEUES = (
Queue(
settings.CELERY_DEFAULT_QUEUE,
Exchange(settings.CELERY_DEFAULT_QUEUE),
routing_key=settings.CELERY_DEFAULT_QUEUE),
)
if settings.SENTRY_DSN:
client = Client(settings.SENTRY_DSN)
register_signal(client)
@app.task
def error_handler(task_id, task_name):
"""logs detailed message about tasks that raise exceptions
:param task_id: TaskID of the failed task
:param task_name: name of task that failed
"""
# get the current logger
logger = get_task_logger(__name__)
# query the broker for the AsyncResult
result = app.AsyncResult(task_id)
excep = result.get(propagate=False)
# log detailed error mesage in error log
logger.error('#####FAILURE LOG BEGIN#####\n'
'Task {0} raised exception: {0}\n\{0}\n'
'#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
Use auto generated celery queues# -*- coding: utf-8 -*-
"""Asynchronous task queue module."""
from celery import Celery
from celery.utils.log import get_task_logger
from raven import Client
from raven.contrib.celery import register_signal
from website import settings
app = Celery()
# TODO: Hardcoded settings module. Should be set using framework's config handler
app.config_from_object('website.settings')
if settings.SENTRY_DSN:
client = Client(settings.SENTRY_DSN)
register_signal(client)
@app.task
def error_handler(task_id, task_name):
"""logs detailed message about tasks that raise exceptions
:param task_id: TaskID of the failed task
:param task_name: name of task that failed
"""
# get the current logger
logger = get_task_logger(__name__)
# query the broker for the AsyncResult
result = app.AsyncResult(task_id)
excep = result.get(propagate=False)
# log detailed error mesage in error log
logger.error('#####FAILURE LOG BEGIN#####\n'
'Task {0} raised exception: {0}\n\{0}\n'
'#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
|
<commit_before># -*- coding: utf-8 -*-
"""Asynchronous task queue module."""
from celery import Celery
from celery.utils.log import get_task_logger
from kombu import Exchange, Queue
from raven import Client
from raven.contrib.celery import register_signal
from website import settings
app = Celery()
# TODO: Hardcoded settings module. Should be set using framework's config handler
app.config_from_object('website.settings')
app.conf.CELERY_QUEUES = (
Queue(
settings.CELERY_DEFAULT_QUEUE,
Exchange(settings.CELERY_DEFAULT_QUEUE),
routing_key=settings.CELERY_DEFAULT_QUEUE),
)
if settings.SENTRY_DSN:
client = Client(settings.SENTRY_DSN)
register_signal(client)
@app.task
def error_handler(task_id, task_name):
"""logs detailed message about tasks that raise exceptions
:param task_id: TaskID of the failed task
:param task_name: name of task that failed
"""
# get the current logger
logger = get_task_logger(__name__)
# query the broker for the AsyncResult
result = app.AsyncResult(task_id)
excep = result.get(propagate=False)
# log detailed error mesage in error log
logger.error('#####FAILURE LOG BEGIN#####\n'
'Task {0} raised exception: {0}\n\{0}\n'
'#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
<commit_msg>Use auto generated celery queues<commit_after># -*- coding: utf-8 -*-
"""Asynchronous task queue module."""
from celery import Celery
from celery.utils.log import get_task_logger
from raven import Client
from raven.contrib.celery import register_signal
from website import settings
app = Celery()
# TODO: Hardcoded settings module. Should be set using framework's config handler
app.config_from_object('website.settings')
if settings.SENTRY_DSN:
client = Client(settings.SENTRY_DSN)
register_signal(client)
@app.task
def error_handler(task_id, task_name):
"""logs detailed message about tasks that raise exceptions
:param task_id: TaskID of the failed task
:param task_name: name of task that failed
"""
# get the current logger
logger = get_task_logger(__name__)
# query the broker for the AsyncResult
result = app.AsyncResult(task_id)
excep = result.get(propagate=False)
# log detailed error mesage in error log
logger.error('#####FAILURE LOG BEGIN#####\n'
'Task {0} raised exception: {0}\n\{0}\n'
'#####FAILURE LOG STOP#####'.format(task_name, excep, result.traceback))
|
4f903a6b974fafba7c4a17b41e0a75b6b62209b3
|
ghtools/command/__init__.py
|
ghtools/command/__init__.py
|
import logging
from os import environ as env
__all__ = []
_log_level_default = logging.WARN
_log_level = getattr(logging, env.get('GHTOOLS_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
|
import logging
from os import environ as env
__all__ = []
_log_level_default = logging.INFO
_log_level = getattr(logging, env.get('GHTOOLS_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
|
Change default loglevel to INFO
|
Change default loglevel to INFO
|
Python
|
mit
|
alphagov/ghtools
|
import logging
from os import environ as env
__all__ = []
_log_level_default = logging.WARN
_log_level = getattr(logging, env.get('GHTOOLS_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
Change default loglevel to INFO
|
import logging
from os import environ as env
__all__ = []
_log_level_default = logging.INFO
_log_level = getattr(logging, env.get('GHTOOLS_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
|
<commit_before>import logging
from os import environ as env
__all__ = []
_log_level_default = logging.WARN
_log_level = getattr(logging, env.get('GHTOOLS_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
<commit_msg>Change default loglevel to INFO<commit_after>
|
import logging
from os import environ as env
__all__ = []
_log_level_default = logging.INFO
_log_level = getattr(logging, env.get('GHTOOLS_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
|
import logging
from os import environ as env
__all__ = []
_log_level_default = logging.WARN
_log_level = getattr(logging, env.get('GHTOOLS_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
Change default loglevel to INFOimport logging
from os import environ as env
__all__ = []
_log_level_default = logging.INFO
_log_level = getattr(logging, env.get('GHTOOLS_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
|
<commit_before>import logging
from os import environ as env
__all__ = []
_log_level_default = logging.WARN
_log_level = getattr(logging, env.get('GHTOOLS_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
<commit_msg>Change default loglevel to INFO<commit_after>import logging
from os import environ as env
__all__ = []
_log_level_default = logging.INFO
_log_level = getattr(logging, env.get('GHTOOLS_LOGLEVEL', '').upper(), _log_level_default)
logging.basicConfig(format='%(levelname)s: %(message)s', level=_log_level)
|
f919aa183d1a82ba745df6a5640e8a7a83f8e87e
|
stix/indicator/valid_time.py
|
stix/indicator/valid_time.py
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import stix
from stix.common import DateTimeWithPrecision
import stix.bindings.indicator as indicator_binding
class ValidTime(stix.Entity):
_namespace = "http://stix.mitre.org/Indicator-2"
_binding = indicator_binding
_binding_class = _binding.ValidTimeType
start_time = fields.TypedField("Start_Time", DateTimeWithPrecision)
end_time = fields.TypedField("End_Time", DateTimeWithPrecision)
def __init__(self, start_time=None, end_time=None):
self._fields = {}
self.start_time = start_time
self.end_time = end_time
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import stix
from stix.common import DateTimeWithPrecision
import stix.bindings.indicator as indicator_binding
from mixbox.entities import Entity
class ValidTime(Entity):
_namespace = "http://stix.mitre.org/Indicator-2"
_binding = indicator_binding
_binding_class = _binding.ValidTimeType
start_time = fields.TypedField("Start_Time", DateTimeWithPrecision)
end_time = fields.TypedField("End_Time", DateTimeWithPrecision)
def __init__(self, start_time=None, end_time=None):
super(ValidTime, self).__init__()
self.start_time = start_time
self.end_time = end_time
|
Change ValidTime to a mixbox Entity
|
Change ValidTime to a mixbox Entity
|
Python
|
bsd-3-clause
|
STIXProject/python-stix
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import stix
from stix.common import DateTimeWithPrecision
import stix.bindings.indicator as indicator_binding
class ValidTime(stix.Entity):
_namespace = "http://stix.mitre.org/Indicator-2"
_binding = indicator_binding
_binding_class = _binding.ValidTimeType
start_time = fields.TypedField("Start_Time", DateTimeWithPrecision)
end_time = fields.TypedField("End_Time", DateTimeWithPrecision)
def __init__(self, start_time=None, end_time=None):
self._fields = {}
self.start_time = start_time
self.end_time = end_time
Change ValidTime to a mixbox Entity
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import stix
from stix.common import DateTimeWithPrecision
import stix.bindings.indicator as indicator_binding
from mixbox.entities import Entity
class ValidTime(Entity):
_namespace = "http://stix.mitre.org/Indicator-2"
_binding = indicator_binding
_binding_class = _binding.ValidTimeType
start_time = fields.TypedField("Start_Time", DateTimeWithPrecision)
end_time = fields.TypedField("End_Time", DateTimeWithPrecision)
def __init__(self, start_time=None, end_time=None):
super(ValidTime, self).__init__()
self.start_time = start_time
self.end_time = end_time
|
<commit_before># Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import stix
from stix.common import DateTimeWithPrecision
import stix.bindings.indicator as indicator_binding
class ValidTime(stix.Entity):
_namespace = "http://stix.mitre.org/Indicator-2"
_binding = indicator_binding
_binding_class = _binding.ValidTimeType
start_time = fields.TypedField("Start_Time", DateTimeWithPrecision)
end_time = fields.TypedField("End_Time", DateTimeWithPrecision)
def __init__(self, start_time=None, end_time=None):
self._fields = {}
self.start_time = start_time
self.end_time = end_time
<commit_msg>Change ValidTime to a mixbox Entity<commit_after>
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import stix
from stix.common import DateTimeWithPrecision
import stix.bindings.indicator as indicator_binding
from mixbox.entities import Entity
class ValidTime(Entity):
_namespace = "http://stix.mitre.org/Indicator-2"
_binding = indicator_binding
_binding_class = _binding.ValidTimeType
start_time = fields.TypedField("Start_Time", DateTimeWithPrecision)
end_time = fields.TypedField("End_Time", DateTimeWithPrecision)
def __init__(self, start_time=None, end_time=None):
super(ValidTime, self).__init__()
self.start_time = start_time
self.end_time = end_time
|
# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import stix
from stix.common import DateTimeWithPrecision
import stix.bindings.indicator as indicator_binding
class ValidTime(stix.Entity):
_namespace = "http://stix.mitre.org/Indicator-2"
_binding = indicator_binding
_binding_class = _binding.ValidTimeType
start_time = fields.TypedField("Start_Time", DateTimeWithPrecision)
end_time = fields.TypedField("End_Time", DateTimeWithPrecision)
def __init__(self, start_time=None, end_time=None):
self._fields = {}
self.start_time = start_time
self.end_time = end_time
Change ValidTime to a mixbox Entity# Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import stix
from stix.common import DateTimeWithPrecision
import stix.bindings.indicator as indicator_binding
from mixbox.entities import Entity
class ValidTime(Entity):
_namespace = "http://stix.mitre.org/Indicator-2"
_binding = indicator_binding
_binding_class = _binding.ValidTimeType
start_time = fields.TypedField("Start_Time", DateTimeWithPrecision)
end_time = fields.TypedField("End_Time", DateTimeWithPrecision)
def __init__(self, start_time=None, end_time=None):
super(ValidTime, self).__init__()
self.start_time = start_time
self.end_time = end_time
|
<commit_before># Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import stix
from stix.common import DateTimeWithPrecision
import stix.bindings.indicator as indicator_binding
class ValidTime(stix.Entity):
_namespace = "http://stix.mitre.org/Indicator-2"
_binding = indicator_binding
_binding_class = _binding.ValidTimeType
start_time = fields.TypedField("Start_Time", DateTimeWithPrecision)
end_time = fields.TypedField("End_Time", DateTimeWithPrecision)
def __init__(self, start_time=None, end_time=None):
self._fields = {}
self.start_time = start_time
self.end_time = end_time
<commit_msg>Change ValidTime to a mixbox Entity<commit_after># Copyright (c) 2015, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import stix
from stix.common import DateTimeWithPrecision
import stix.bindings.indicator as indicator_binding
from mixbox.entities import Entity
class ValidTime(Entity):
_namespace = "http://stix.mitre.org/Indicator-2"
_binding = indicator_binding
_binding_class = _binding.ValidTimeType
start_time = fields.TypedField("Start_Time", DateTimeWithPrecision)
end_time = fields.TypedField("End_Time", DateTimeWithPrecision)
def __init__(self, start_time=None, end_time=None):
super(ValidTime, self).__init__()
self.start_time = start_time
self.end_time = end_time
|
bc6e6f0faec8405849c896b0661c181e9853359d
|
match/management/commands/import-users.py
|
match/management/commands/import-users.py
|
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
|
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
User.objects.all().delete()
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
|
Delete users before importing them
|
Delete users before importing them
|
Python
|
mit
|
maxf/address-matcher,maxf/address-matcher,maxf/address-matcher,maxf/address-matcher
|
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
Delete users before importing them
|
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
User.objects.all().delete()
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
<commit_msg>Delete users before importing them<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
User.objects.all().delete()
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
|
from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
Delete users before importing themfrom django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
User.objects.all().delete()
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
<commit_msg>Delete users before importing them<commit_after>from django.core.management.base import BaseCommand, CommandError
from match.models import User
import csv
import sys
class Command(BaseCommand):
help = 'Import a list of users from stdin'
def handle(self, *args, **options):
# read a file and copy its contents as test users
User.objects.all().delete()
tsvin = csv.reader(sys.stdin, delimiter='\t')
for row in tsvin:
User.objects.create(
name = row[0]
)
|
d656c0117e8487b8b56b4ee3caceb2dcb38ec198
|
sympy/concrete/tests/test_gosper.py
|
sympy/concrete/tests/test_gosper.py
|
def test_normal():
pass
def test_gosper():
pass
|
from sympy import Symbol, normal
from sympy.abc import n
def test_normal():
assert normal(4*n+5, 2*(4*n+1)*(2*n+3), n)
def test_gosper():
pass
|
Add test for part of gosper's algorithm.
|
Add test for part of gosper's algorithm.
|
Python
|
bsd-3-clause
|
abhiii5459/sympy,mafiya69/sympy,atreyv/sympy,wanglongqi/sympy,pandeyadarsh/sympy,liangjiaxing/sympy,srjoglekar246/sympy,Sumith1896/sympy,bukzor/sympy,atsao72/sympy,sunny94/temp,moble/sympy,cccfran/sympy,yashsharan/sympy,drufat/sympy,maniteja123/sympy,AunShiLord/sympy,shikil/sympy,pandeyadarsh/sympy,Davidjohnwilson/sympy,beni55/sympy,kumarkrishna/sympy,jerli/sympy,ga7g08/sympy,hargup/sympy,atsao72/sympy,madan96/sympy,emon10005/sympy,Curious72/sympy,maniteja123/sympy,lindsayad/sympy,amitjamadagni/sympy,toolforger/sympy,garvitr/sympy,ga7g08/sympy,mafiya69/sympy,kevalds51/sympy,kaushik94/sympy,Vishluck/sympy,Titan-C/sympy,oliverlee/sympy,Titan-C/sympy,iamutkarshtiwari/sympy,mcdaniel67/sympy,lidavidm/sympy,MridulS/sympy,Curious72/sympy,mafiya69/sympy,abloomston/sympy,debugger22/sympy,oliverlee/sympy,asm666/sympy,kaichogami/sympy,shikil/sympy,saurabhjn76/sympy,ga7g08/sympy,pbrady/sympy,chaffra/sympy,farhaanbukhsh/sympy,saurabhjn76/sympy,shipci/sympy,sahilshekhawat/sympy,Designist/sympy,liangjiaxing/sympy,liangjiaxing/sympy,Designist/sympy,cccfran/sympy,meghana1995/sympy,flacjacket/sympy,abloomston/sympy,asm666/sympy,yukoba/sympy,garvitr/sympy,chaffra/sympy,cswiercz/sympy,vipulroxx/sympy,sunny94/temp,toolforger/sympy,AkademieOlympia/sympy,sahilshekhawat/sympy,jerli/sympy,sampadsaha5/sympy,Titan-C/sympy,debugger22/sympy,yashsharan/sympy,Sumith1896/sympy,postvakje/sympy,maniteja123/sympy,Arafatk/sympy,pbrady/sympy,kumarkrishna/sympy,Gadal/sympy,VaibhavAgarwalVA/sympy,aktech/sympy,jamesblunt/sympy,kevalds51/sympy,sahilshekhawat/sympy,dqnykamp/sympy,drufat/sympy,sunny94/temp,wanglongqi/sympy,jaimahajan1997/sympy,pandeyadarsh/sympy,Arafatk/sympy,beni55/sympy,vipulroxx/sympy,AkademieOlympia/sympy,beni55/sympy,emon10005/sympy,farhaanbukhsh/sympy,MridulS/sympy,asm666/sympy,sampadsaha5/sympy,cccfran/sympy,Shaswat27/sympy,ChristinaZografou/sympy,MechCoder/sympy,wanglongqi/sympy,postvakje/sympy,ahhda/sympy,souravsingh/sympy,sahmed95/sympy,vipulroxx/sympy,lidavidm/sympy,kaichogami/sym
py,hrashk/sympy,meghana1995/sympy,sampadsaha5/sympy,shipci/sympy,debugger22/sympy,shikil/sympy,minrk/sympy,cswiercz/sympy,hargup/sympy,jbbskinny/sympy,grevutiu-gabriel/sympy,diofant/diofant,mcdaniel67/sympy,kevalds51/sympy,hrashk/sympy,skidzo/sympy,atreyv/sympy,kumarkrishna/sympy,cswiercz/sympy,madan96/sympy,drufat/sympy,bukzor/sympy,Gadal/sympy,madan96/sympy,postvakje/sympy,wyom/sympy,kaushik94/sympy,kmacinnis/sympy,souravsingh/sympy,skirpichev/omg,rahuldan/sympy,atsao72/sympy,dqnykamp/sympy,souravsingh/sympy,rahuldan/sympy,kaushik94/sympy,farhaanbukhsh/sympy,Shaswat27/sympy,hrashk/sympy,Gadal/sympy,skidzo/sympy,iamutkarshtiwari/sympy,AunShiLord/sympy,wyom/sympy,Davidjohnwilson/sympy,abloomston/sympy,oliverlee/sympy,Mitchkoens/sympy,minrk/sympy,Vishluck/sympy,shipci/sympy,Arafatk/sympy,garvitr/sympy,saurabhjn76/sympy,Vishluck/sympy,grevutiu-gabriel/sympy,VaibhavAgarwalVA/sympy,kaichogami/sympy,jaimahajan1997/sympy,skidzo/sympy,iamutkarshtiwari/sympy,moble/sympy,Mitchkoens/sympy,kmacinnis/sympy,ahhda/sympy,MridulS/sympy,hargup/sympy,jbbskinny/sympy,jamesblunt/sympy,jamesblunt/sympy,meghana1995/sympy,ahhda/sympy,kmacinnis/sympy,jaimahajan1997/sympy,MechCoder/sympy,VaibhavAgarwalVA/sympy,yashsharan/sympy,Designist/sympy,pbrady/sympy,dqnykamp/sympy,yukoba/sympy,atreyv/sympy,aktech/sympy,AunShiLord/sympy,Curious72/sympy,ChristinaZografou/sympy,abhiii5459/sympy,emon10005/sympy,rahuldan/sympy,sahmed95/sympy,moble/sympy,bukzor/sympy,mcdaniel67/sympy,AkademieOlympia/sympy,grevutiu-gabriel/sympy,MechCoder/sympy,abhiii5459/sympy,jbbskinny/sympy,Sumith1896/sympy,jerli/sympy,Davidjohnwilson/sympy,lindsayad/sympy,sahmed95/sympy,Mitchkoens/sympy,ChristinaZografou/sympy,wyom/sympy,toolforger/sympy,yukoba/sympy,lidavidm/sympy,lindsayad/sympy,Shaswat27/sympy,aktech/sympy,amitjamadagni/sympy,chaffra/sympy
|
def test_normal():
pass
def test_gosper():
pass
Add test for part of gosper's algorithm.
|
from sympy import Symbol, normal
from sympy.abc import n
def test_normal():
assert normal(4*n+5, 2*(4*n+1)*(2*n+3), n)
def test_gosper():
pass
|
<commit_before>def test_normal():
pass
def test_gosper():
pass
<commit_msg>Add test for part of gosper's algorithm.<commit_after>
|
from sympy import Symbol, normal
from sympy.abc import n
def test_normal():
assert normal(4*n+5, 2*(4*n+1)*(2*n+3), n)
def test_gosper():
pass
|
def test_normal():
pass
def test_gosper():
pass
Add test for part of gosper's algorithm.from sympy import Symbol, normal
from sympy.abc import n
def test_normal():
assert normal(4*n+5, 2*(4*n+1)*(2*n+3), n)
def test_gosper():
pass
|
<commit_before>def test_normal():
pass
def test_gosper():
pass
<commit_msg>Add test for part of gosper's algorithm.<commit_after>from sympy import Symbol, normal
from sympy.abc import n
def test_normal():
assert normal(4*n+5, 2*(4*n+1)*(2*n+3), n)
def test_gosper():
pass
|
bcb84a5b85259644ec0949f820ac1ed8f03d1676
|
scripts/reactions.py
|
scripts/reactions.py
|
import argparse
from lenrmc.nubase import System
from lenrmc.terminal import TerminalView
class App(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
def run(self):
s = System.parse(self.kwargs['system'], **self.kwargs)
for line in TerminalView(s).lines(**self.kwargs):
print(line)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('system', type=str)
parser.add_argument('--lb', dest='lower_bound')
parser.add_argument('--spins', dest='spins', action='store_true')
parser.add_argument('--references', dest='references', action='store_true')
parser.set_defaults(lower_bound=0, spins=False, references=True)
return parser.parse_args()
if '__main__' == __name__:
opts = parse_arguments()
App(**vars(opts)).run()
|
import argparse
from lenrmc.nubase import System
from lenrmc.terminal import TerminalView, StudiesTerminalView
class App(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
if 'studies' == self.kwargs.get('view'):
self.view_cls = StudiesTerminalView
else:
self.view_cls = TerminalView
def run(self):
s = System.parse(self.kwargs['system'], **self.kwargs)
for line in self.view_cls(s).lines(**self.kwargs):
print(line)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('system', type=str)
parser.add_argument('--lb', dest='lower_bound')
parser.add_argument('--spins', dest='spins', action='store_true')
parser.add_argument('--references', dest='references', action='store_true')
parser.add_argument('--view', type=str, dest='view')
parser.set_defaults(
lower_bound = 0,
spins = False,
references = True,
view = 'default',
)
return parser.parse_args()
if '__main__' == __name__:
opts = parse_arguments()
App(**vars(opts)).run()
|
Add ability to print out studies view from command line.
|
Add ability to print out studies view from command line.
|
Python
|
mit
|
emwalker/lenrmc
|
import argparse
from lenrmc.nubase import System
from lenrmc.terminal import TerminalView
class App(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
def run(self):
s = System.parse(self.kwargs['system'], **self.kwargs)
for line in TerminalView(s).lines(**self.kwargs):
print(line)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('system', type=str)
parser.add_argument('--lb', dest='lower_bound')
parser.add_argument('--spins', dest='spins', action='store_true')
parser.add_argument('--references', dest='references', action='store_true')
parser.set_defaults(lower_bound=0, spins=False, references=True)
return parser.parse_args()
if '__main__' == __name__:
opts = parse_arguments()
App(**vars(opts)).run()
Add ability to print out studies view from command line.
|
import argparse
from lenrmc.nubase import System
from lenrmc.terminal import TerminalView, StudiesTerminalView
class App(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
if 'studies' == self.kwargs.get('view'):
self.view_cls = StudiesTerminalView
else:
self.view_cls = TerminalView
def run(self):
s = System.parse(self.kwargs['system'], **self.kwargs)
for line in self.view_cls(s).lines(**self.kwargs):
print(line)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('system', type=str)
parser.add_argument('--lb', dest='lower_bound')
parser.add_argument('--spins', dest='spins', action='store_true')
parser.add_argument('--references', dest='references', action='store_true')
parser.add_argument('--view', type=str, dest='view')
parser.set_defaults(
lower_bound = 0,
spins = False,
references = True,
view = 'default',
)
return parser.parse_args()
if '__main__' == __name__:
opts = parse_arguments()
App(**vars(opts)).run()
|
<commit_before>import argparse
from lenrmc.nubase import System
from lenrmc.terminal import TerminalView
class App(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
def run(self):
s = System.parse(self.kwargs['system'], **self.kwargs)
for line in TerminalView(s).lines(**self.kwargs):
print(line)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('system', type=str)
parser.add_argument('--lb', dest='lower_bound')
parser.add_argument('--spins', dest='spins', action='store_true')
parser.add_argument('--references', dest='references', action='store_true')
parser.set_defaults(lower_bound=0, spins=False, references=True)
return parser.parse_args()
if '__main__' == __name__:
opts = parse_arguments()
App(**vars(opts)).run()
<commit_msg>Add ability to print out studies view from command line.<commit_after>
|
import argparse
from lenrmc.nubase import System
from lenrmc.terminal import TerminalView, StudiesTerminalView
class App(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
if 'studies' == self.kwargs.get('view'):
self.view_cls = StudiesTerminalView
else:
self.view_cls = TerminalView
def run(self):
s = System.parse(self.kwargs['system'], **self.kwargs)
for line in self.view_cls(s).lines(**self.kwargs):
print(line)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('system', type=str)
parser.add_argument('--lb', dest='lower_bound')
parser.add_argument('--spins', dest='spins', action='store_true')
parser.add_argument('--references', dest='references', action='store_true')
parser.add_argument('--view', type=str, dest='view')
parser.set_defaults(
lower_bound = 0,
spins = False,
references = True,
view = 'default',
)
return parser.parse_args()
if '__main__' == __name__:
opts = parse_arguments()
App(**vars(opts)).run()
|
import argparse
from lenrmc.nubase import System
from lenrmc.terminal import TerminalView
class App(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
def run(self):
s = System.parse(self.kwargs['system'], **self.kwargs)
for line in TerminalView(s).lines(**self.kwargs):
print(line)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('system', type=str)
parser.add_argument('--lb', dest='lower_bound')
parser.add_argument('--spins', dest='spins', action='store_true')
parser.add_argument('--references', dest='references', action='store_true')
parser.set_defaults(lower_bound=0, spins=False, references=True)
return parser.parse_args()
if '__main__' == __name__:
opts = parse_arguments()
App(**vars(opts)).run()
Add ability to print out studies view from command line.import argparse
from lenrmc.nubase import System
from lenrmc.terminal import TerminalView, StudiesTerminalView
class App(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
if 'studies' == self.kwargs.get('view'):
self.view_cls = StudiesTerminalView
else:
self.view_cls = TerminalView
def run(self):
s = System.parse(self.kwargs['system'], **self.kwargs)
for line in self.view_cls(s).lines(**self.kwargs):
print(line)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('system', type=str)
parser.add_argument('--lb', dest='lower_bound')
parser.add_argument('--spins', dest='spins', action='store_true')
parser.add_argument('--references', dest='references', action='store_true')
parser.add_argument('--view', type=str, dest='view')
parser.set_defaults(
lower_bound = 0,
spins = False,
references = True,
view = 'default',
)
return parser.parse_args()
if '__main__' == __name__:
opts = parse_arguments()
App(**vars(opts)).run()
|
<commit_before>import argparse
from lenrmc.nubase import System
from lenrmc.terminal import TerminalView
class App(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
def run(self):
s = System.parse(self.kwargs['system'], **self.kwargs)
for line in TerminalView(s).lines(**self.kwargs):
print(line)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('system', type=str)
parser.add_argument('--lb', dest='lower_bound')
parser.add_argument('--spins', dest='spins', action='store_true')
parser.add_argument('--references', dest='references', action='store_true')
parser.set_defaults(lower_bound=0, spins=False, references=True)
return parser.parse_args()
if '__main__' == __name__:
opts = parse_arguments()
App(**vars(opts)).run()
<commit_msg>Add ability to print out studies view from command line.<commit_after>import argparse
from lenrmc.nubase import System
from lenrmc.terminal import TerminalView, StudiesTerminalView
class App(object):
def __init__(self, **kwargs):
self.kwargs = kwargs
if 'studies' == self.kwargs.get('view'):
self.view_cls = StudiesTerminalView
else:
self.view_cls = TerminalView
def run(self):
s = System.parse(self.kwargs['system'], **self.kwargs)
for line in self.view_cls(s).lines(**self.kwargs):
print(line)
def parse_arguments():
parser = argparse.ArgumentParser()
parser.add_argument('system', type=str)
parser.add_argument('--lb', dest='lower_bound')
parser.add_argument('--spins', dest='spins', action='store_true')
parser.add_argument('--references', dest='references', action='store_true')
parser.add_argument('--view', type=str, dest='view')
parser.set_defaults(
lower_bound = 0,
spins = False,
references = True,
view = 'default',
)
return parser.parse_args()
if '__main__' == __name__:
opts = parse_arguments()
App(**vars(opts)).run()
|
00103355232a0efb76f401dbec3ab4f8be32526a
|
qual/calendar.py
|
qual/calendar.py
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
d = d + timedelta(days=11)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
Handle julian leap days separately.
|
Handle julian leap days separately.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
Handle julian leap days separately.
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
d = d + timedelta(days=11)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
<commit_before>from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
<commit_msg>Handle julian leap days separately.<commit_after>
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
d = d + timedelta(days=11)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
Handle julian leap days separately.from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
d = d + timedelta(days=11)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
<commit_before>from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
<commit_msg>Handle julian leap days separately.<commit_after>from datetime import date, timedelta
class DateWithCalendar(object):
def __init__(self, calendar_class, date):
self.calendar = calendar_class
self.date = date
def convert_to(self, calendar):
return calendar.from_date(self.date)
def __eq__(self, other):
return self.calendar == other.calendar and self.date == other.date
class Calendar(object):
def from_date(self, date):
return DateWithCalendar(self.__class__, date)
class ProlepticGregorianCalendar(Calendar):
def date(self, year, month, day):
d = date(year, month, day)
return self.from_date(d)
class JulianCalendar(Calendar):
@staticmethod
def is_julian_leap_year(y):
return (y % 4) == 0
def date(self, year, month, day):
if day == 29 and month == 2 and self.is_julian_leap_year(year):
d = date(year, month, 28)
d = d + timedelta(days=11)
else:
d = date(year, month, day)
d = d + timedelta(days=10)
return self.from_date(d)
|
c4cd02672c81a486f6397ea099b9ed1e95b05df6
|
docs/tests.py
|
docs/tests.py
|
import os
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.core.management import call_command
import views
class DocsTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse(views.index))
self.assertEqual(response.status_code, 200)
def test_doc_pages(self):
names = os.listdir("docs/other")
pages = [x.replace("_plain", "").replace(".md", "") for x in names]
pages += ["technical"]
for page in pages:
response = self.client.get(reverse(views.docs_pages, args=(page, )))
self.assertEqual(response.status_code, 200)
def test_make_docs(self):
call_command("make_docs")
|
import os
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.core.management import call_command
import views
class DocsTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse(views.index))
self.assertEqual(response.status_code, 200)
def test_doc_pages(self):
names = os.listdir("docs/other")
pages = [x.replace("_plain", "").replace(".md", "") for x in names]
pages += ["technical"]
for page in pages:
response = self.client.get(reverse(views.docs_pages, args=(page, )))
self.assertEqual(response.status_code, 200)
def test_doc_pages_404(self):
response = self.client.get(reverse(views.docs_pages, args=("notarealpage", )))
self.assertEqual(response.status_code, 404)
def test_make_docs(self):
call_command("make_docs")
|
Add test case for 404 on docs pages
|
Add test case for 404 on docs pages
|
Python
|
mit
|
crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp
|
import os
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.core.management import call_command
import views
class DocsTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse(views.index))
self.assertEqual(response.status_code, 200)
def test_doc_pages(self):
names = os.listdir("docs/other")
pages = [x.replace("_plain", "").replace(".md", "") for x in names]
pages += ["technical"]
for page in pages:
response = self.client.get(reverse(views.docs_pages, args=(page, )))
self.assertEqual(response.status_code, 200)
def test_make_docs(self):
call_command("make_docs")Add test case for 404 on docs pages
|
import os
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.core.management import call_command
import views
class DocsTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse(views.index))
self.assertEqual(response.status_code, 200)
def test_doc_pages(self):
names = os.listdir("docs/other")
pages = [x.replace("_plain", "").replace(".md", "") for x in names]
pages += ["technical"]
for page in pages:
response = self.client.get(reverse(views.docs_pages, args=(page, )))
self.assertEqual(response.status_code, 200)
def test_doc_pages_404(self):
response = self.client.get(reverse(views.docs_pages, args=("notarealpage", )))
self.assertEqual(response.status_code, 404)
def test_make_docs(self):
call_command("make_docs")
|
<commit_before>import os
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.core.management import call_command
import views
class DocsTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse(views.index))
self.assertEqual(response.status_code, 200)
def test_doc_pages(self):
names = os.listdir("docs/other")
pages = [x.replace("_plain", "").replace(".md", "") for x in names]
pages += ["technical"]
for page in pages:
response = self.client.get(reverse(views.docs_pages, args=(page, )))
self.assertEqual(response.status_code, 200)
def test_make_docs(self):
call_command("make_docs")<commit_msg>Add test case for 404 on docs pages<commit_after>
|
import os
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.core.management import call_command
import views
class DocsTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse(views.index))
self.assertEqual(response.status_code, 200)
def test_doc_pages(self):
names = os.listdir("docs/other")
pages = [x.replace("_plain", "").replace(".md", "") for x in names]
pages += ["technical"]
for page in pages:
response = self.client.get(reverse(views.docs_pages, args=(page, )))
self.assertEqual(response.status_code, 200)
def test_doc_pages_404(self):
response = self.client.get(reverse(views.docs_pages, args=("notarealpage", )))
self.assertEqual(response.status_code, 404)
def test_make_docs(self):
call_command("make_docs")
|
import os
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.core.management import call_command
import views
class DocsTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse(views.index))
self.assertEqual(response.status_code, 200)
def test_doc_pages(self):
names = os.listdir("docs/other")
pages = [x.replace("_plain", "").replace(".md", "") for x in names]
pages += ["technical"]
for page in pages:
response = self.client.get(reverse(views.docs_pages, args=(page, )))
self.assertEqual(response.status_code, 200)
def test_make_docs(self):
call_command("make_docs")Add test case for 404 on docs pagesimport os
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.core.management import call_command
import views
class DocsTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse(views.index))
self.assertEqual(response.status_code, 200)
def test_doc_pages(self):
names = os.listdir("docs/other")
pages = [x.replace("_plain", "").replace(".md", "") for x in names]
pages += ["technical"]
for page in pages:
response = self.client.get(reverse(views.docs_pages, args=(page, )))
self.assertEqual(response.status_code, 200)
def test_doc_pages_404(self):
response = self.client.get(reverse(views.docs_pages, args=("notarealpage", )))
self.assertEqual(response.status_code, 404)
def test_make_docs(self):
call_command("make_docs")
|
<commit_before>import os
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.core.management import call_command
import views
class DocsTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse(views.index))
self.assertEqual(response.status_code, 200)
def test_doc_pages(self):
names = os.listdir("docs/other")
pages = [x.replace("_plain", "").replace(".md", "") for x in names]
pages += ["technical"]
for page in pages:
response = self.client.get(reverse(views.docs_pages, args=(page, )))
self.assertEqual(response.status_code, 200)
def test_make_docs(self):
call_command("make_docs")<commit_msg>Add test case for 404 on docs pages<commit_after>import os
from django.test import Client, TestCase
from django.core.urlresolvers import reverse
from django.core.management import call_command
import views
class DocsTestCase(TestCase):
def setUp(self):
self.client = Client()
def test_index(self):
response = self.client.get(reverse(views.index))
self.assertEqual(response.status_code, 200)
def test_doc_pages(self):
names = os.listdir("docs/other")
pages = [x.replace("_plain", "").replace(".md", "") for x in names]
pages += ["technical"]
for page in pages:
response = self.client.get(reverse(views.docs_pages, args=(page, )))
self.assertEqual(response.status_code, 200)
def test_doc_pages_404(self):
response = self.client.get(reverse(views.docs_pages, args=("notarealpage", )))
self.assertEqual(response.status_code, 404)
def test_make_docs(self):
call_command("make_docs")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.