Dataset schema (column name and value statistics):

| Column | Type |
| --- | --- |
| commit | stringlengths (min 40, max 40) |
| old_file | stringlengths (min 4, max 118) |
| new_file | stringlengths (min 4, max 118) |
| old_contents | stringlengths (min 0, max 2.94k) |
| new_contents | stringlengths (min 1, max 4.43k) |
| subject | stringlengths (min 15, max 444) |
| message | stringlengths (min 16, max 3.45k) |
| lang | stringclasses (1 value) |
| license | stringclasses (13 values) |
| repos | stringlengths (min 5, max 43.2k) |
| prompt | stringlengths (min 17, max 4.58k) |
| response | stringlengths (min 1, max 4.43k) |
| prompt_tagged | stringlengths (min 58, max 4.62k) |
| response_tagged | stringlengths (min 1, max 4.43k) |
| text | stringlengths (min 132, max 7.29k) |
| text_tagged | stringlengths (min 173, max 7.33k) |
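The rows below pair each commit's file contents before and after the change with the commit subject, message, and repository metadata. As a minimal sketch of how such a table could be consumed (assuming it is published as a Hugging Face `datasets` dataset; the dataset path used here is a placeholder, not the real identifier), the columns above map onto row fields like this:

```python
# Minimal sketch, not the canonical loader for this table: iterate a few rows
# of a commit-rewrite dataset exposing the columns listed above.
# "user/commit-rewrite-dataset" is a placeholder path, assumed for illustration.
from datasets import load_dataset

ds = load_dataset("user/commit-rewrite-dataset", split="train")

for row in ds.select(range(3)):
    # Each row pairs a file's contents before and after one commit
    # with the commit subject, language, license, and repository list.
    print(row["commit"], row["old_file"], "-", row["subject"])
    print(len(row["old_contents"]), "chars ->", len(row["new_contents"]), "chars")
```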
1a594aec0b5af4c815c90d9a19abdf941fb5f5e4
|
cogs/command_log.py
|
cogs/command_log.py
|
import logging
class CommandLog:
    """A simple cog to log commands executed."""
    def __init__(self):
        self.log = logging.getLogger('liara.command_log')
    async def on_command(self, ctx):
        kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
        args = 'with arguments {} '.format(kwargs) if kwargs else ''
        self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})'
                      .format(ctx, args))
def setup(liara):
    liara.add_cog(CommandLog())
|
import logging
class CommandLog:
    """A simple cog to log commands executed."""
    def __init__(self):
        self.log = logging.getLogger('liara.command_log')
    async def on_command(self, ctx):
        kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
        args = 'with arguments {} '.format(kwargs) if kwargs else ''
        msg = '{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})' \
            .format(ctx, args)
        if ctx.bot.shard_id is not None:
            msg += ' on shard {}'.format(ctx.bot.shard_id+1)
        self.log.info(msg)
def setup(liara):
    liara.add_cog(CommandLog())
|
Add the shard ID to the command log
|
Add the shard ID to the command log
|
Python
|
mit
|
Thessia/Liara
|
import logging
class CommandLog:
    """A simple cog to log commands executed."""
    def __init__(self):
        self.log = logging.getLogger('liara.command_log')
    async def on_command(self, ctx):
        kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
        args = 'with arguments {} '.format(kwargs) if kwargs else ''
        self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})'
                      .format(ctx, args))
def setup(liara):
    liara.add_cog(CommandLog())
Add the shard ID to the command log
|
import logging
class CommandLog:
    """A simple cog to log commands executed."""
    def __init__(self):
        self.log = logging.getLogger('liara.command_log')
    async def on_command(self, ctx):
        kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
        args = 'with arguments {} '.format(kwargs) if kwargs else ''
        msg = '{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})' \
            .format(ctx, args)
        if ctx.bot.shard_id is not None:
            msg += ' on shard {}'.format(ctx.bot.shard_id+1)
        self.log.info(msg)
def setup(liara):
    liara.add_cog(CommandLog())
|
<commit_before>import logging
class CommandLog:
    """A simple cog to log commands executed."""
    def __init__(self):
        self.log = logging.getLogger('liara.command_log')
    async def on_command(self, ctx):
        kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
        args = 'with arguments {} '.format(kwargs) if kwargs else ''
        self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})'
                      .format(ctx, args))
def setup(liara):
    liara.add_cog(CommandLog())
<commit_msg>Add the shard ID to the command log<commit_after>
|
import logging
class CommandLog:
    """A simple cog to log commands executed."""
    def __init__(self):
        self.log = logging.getLogger('liara.command_log')
    async def on_command(self, ctx):
        kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
        args = 'with arguments {} '.format(kwargs) if kwargs else ''
        msg = '{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})' \
            .format(ctx, args)
        if ctx.bot.shard_id is not None:
            msg += ' on shard {}'.format(ctx.bot.shard_id+1)
        self.log.info(msg)
def setup(liara):
    liara.add_cog(CommandLog())
|
import logging
class CommandLog:
    """A simple cog to log commands executed."""
    def __init__(self):
        self.log = logging.getLogger('liara.command_log')
    async def on_command(self, ctx):
        kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
        args = 'with arguments {} '.format(kwargs) if kwargs else ''
        self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})'
                      .format(ctx, args))
def setup(liara):
    liara.add_cog(CommandLog())
Add the shard ID to the command logimport logging
class CommandLog:
    """A simple cog to log commands executed."""
    def __init__(self):
        self.log = logging.getLogger('liara.command_log')
    async def on_command(self, ctx):
        kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
        args = 'with arguments {} '.format(kwargs) if kwargs else ''
        msg = '{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})' \
            .format(ctx, args)
        if ctx.bot.shard_id is not None:
            msg += ' on shard {}'.format(ctx.bot.shard_id+1)
        self.log.info(msg)
def setup(liara):
    liara.add_cog(CommandLog())
|
<commit_before>import logging
class CommandLog:
    """A simple cog to log commands executed."""
    def __init__(self):
        self.log = logging.getLogger('liara.command_log')
    async def on_command(self, ctx):
        kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
        args = 'with arguments {} '.format(kwargs) if kwargs else ''
        self.log.info('{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})'
                      .format(ctx, args))
def setup(liara):
    liara.add_cog(CommandLog())
<commit_msg>Add the shard ID to the command log<commit_after>import logging
class CommandLog:
    """A simple cog to log commands executed."""
    def __init__(self):
        self.log = logging.getLogger('liara.command_log')
    async def on_command(self, ctx):
        kwargs = ', '.join(['{}={}'.format(k, repr(v)) for k, v in ctx.kwargs.items()])
        args = 'with arguments {} '.format(kwargs) if kwargs else ''
        msg = '{0.author} ({0.author.id}) executed command "{0.command}" {1}in {0.guild} ({0.guild.id})' \
            .format(ctx, args)
        if ctx.bot.shard_id is not None:
            msg += ' on shard {}'.format(ctx.bot.shard_id+1)
        self.log.info(msg)
def setup(liara):
    liara.add_cog(CommandLog())
|
cdcc2dd6342b47e1387beca54677ff7114fc48ec
|
cms/tests/test_externals.py
|
cms/tests/test_externals.py
|
from django.test import TestCase
from ..externals import External
try:
    from contextlib import GeneratorContextManager
except ImportError:
    from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
    def test_load(self):
        external = External('foo')
        with self.assertRaises(ImportError):
            external._load('')
    def test_load_class(self):
        external = External('foo')
        self.assertIsInstance(external.load_class(''), object)
        self.assertTrue(external.load_class('', fallback=True))
    def test_load_method(self):
        external = External('foo')
        self.assertIsNone(external.load_method('')())
        self.assertTrue(external.load_method('', fallback=True))
    def test_context_manager(self):
        external = External('foo')
        self.assertIs(type(external.context_manager('')), FunctionType)
        self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
        self.assertTrue(external.context_manager('', fallback=True))
|
from django.test import TestCase
from ..externals import External
try:
    from contextlib import GeneratorContextManager
except ImportError:
    from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
    def test_load(self):
        external = External('foo')
        with self.assertRaises(ImportError):
            external._load('')
    def test_load_class(self):
        external = External('foo')
        self.assertIsInstance(external.load_class(''), object)
        self.assertTrue(external.load_class('', fallback=True))
    def test_load_method(self):
        external = External('foo')
        self.assertIsNone(external.load_method('')())
        self.assertTrue(external.load_method('', fallback=True))
    def test_context_manager(self):
        external = External('foo')
        self.assertIs(type(external.context_manager('')), FunctionType)
        self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
        self.assertTrue(external.context_manager('', fallback=True))
        with external.context_manager('')():
            self.assertTrue(True)
|
Improve test coverage of externals.
|
Improve test coverage of externals.
|
Python
|
bsd-3-clause
|
dan-gamble/cms,jamesfoley/cms,dan-gamble/cms,lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,jamesfoley/cms,danielsamuels/cms,danielsamuels/cms,lewiscollard/cms,lewiscollard/cms,danielsamuels/cms,jamesfoley/cms
|
from django.test import TestCase
from ..externals import External
try:
    from contextlib import GeneratorContextManager
except ImportError:
    from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
    def test_load(self):
        external = External('foo')
        with self.assertRaises(ImportError):
            external._load('')
    def test_load_class(self):
        external = External('foo')
        self.assertIsInstance(external.load_class(''), object)
        self.assertTrue(external.load_class('', fallback=True))
    def test_load_method(self):
        external = External('foo')
        self.assertIsNone(external.load_method('')())
        self.assertTrue(external.load_method('', fallback=True))
    def test_context_manager(self):
        external = External('foo')
        self.assertIs(type(external.context_manager('')), FunctionType)
        self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
        self.assertTrue(external.context_manager('', fallback=True))
Improve test coverage of externals.
|
from django.test import TestCase
from ..externals import External
try:
    from contextlib import GeneratorContextManager
except ImportError:
    from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
    def test_load(self):
        external = External('foo')
        with self.assertRaises(ImportError):
            external._load('')
    def test_load_class(self):
        external = External('foo')
        self.assertIsInstance(external.load_class(''), object)
        self.assertTrue(external.load_class('', fallback=True))
    def test_load_method(self):
        external = External('foo')
        self.assertIsNone(external.load_method('')())
        self.assertTrue(external.load_method('', fallback=True))
    def test_context_manager(self):
        external = External('foo')
        self.assertIs(type(external.context_manager('')), FunctionType)
        self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
        self.assertTrue(external.context_manager('', fallback=True))
        with external.context_manager('')():
            self.assertTrue(True)
|
<commit_before>from django.test import TestCase
from ..externals import External
try:
    from contextlib import GeneratorContextManager
except ImportError:
    from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
    def test_load(self):
        external = External('foo')
        with self.assertRaises(ImportError):
            external._load('')
    def test_load_class(self):
        external = External('foo')
        self.assertIsInstance(external.load_class(''), object)
        self.assertTrue(external.load_class('', fallback=True))
    def test_load_method(self):
        external = External('foo')
        self.assertIsNone(external.load_method('')())
        self.assertTrue(external.load_method('', fallback=True))
    def test_context_manager(self):
        external = External('foo')
        self.assertIs(type(external.context_manager('')), FunctionType)
        self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
        self.assertTrue(external.context_manager('', fallback=True))
<commit_msg>Improve test coverage of externals.<commit_after>
|
from django.test import TestCase
from ..externals import External
try:
    from contextlib import GeneratorContextManager
except ImportError:
    from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
    def test_load(self):
        external = External('foo')
        with self.assertRaises(ImportError):
            external._load('')
    def test_load_class(self):
        external = External('foo')
        self.assertIsInstance(external.load_class(''), object)
        self.assertTrue(external.load_class('', fallback=True))
    def test_load_method(self):
        external = External('foo')
        self.assertIsNone(external.load_method('')())
        self.assertTrue(external.load_method('', fallback=True))
    def test_context_manager(self):
        external = External('foo')
        self.assertIs(type(external.context_manager('')), FunctionType)
        self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
        self.assertTrue(external.context_manager('', fallback=True))
        with external.context_manager('')():
            self.assertTrue(True)
|
from django.test import TestCase
from ..externals import External
try:
    from contextlib import GeneratorContextManager
except ImportError:
    from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
    def test_load(self):
        external = External('foo')
        with self.assertRaises(ImportError):
            external._load('')
    def test_load_class(self):
        external = External('foo')
        self.assertIsInstance(external.load_class(''), object)
        self.assertTrue(external.load_class('', fallback=True))
    def test_load_method(self):
        external = External('foo')
        self.assertIsNone(external.load_method('')())
        self.assertTrue(external.load_method('', fallback=True))
    def test_context_manager(self):
        external = External('foo')
        self.assertIs(type(external.context_manager('')), FunctionType)
        self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
        self.assertTrue(external.context_manager('', fallback=True))
Improve test coverage of externals.from django.test import TestCase
from ..externals import External
try:
    from contextlib import GeneratorContextManager
except ImportError:
    from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
    def test_load(self):
        external = External('foo')
        with self.assertRaises(ImportError):
            external._load('')
    def test_load_class(self):
        external = External('foo')
        self.assertIsInstance(external.load_class(''), object)
        self.assertTrue(external.load_class('', fallback=True))
    def test_load_method(self):
        external = External('foo')
        self.assertIsNone(external.load_method('')())
        self.assertTrue(external.load_method('', fallback=True))
    def test_context_manager(self):
        external = External('foo')
        self.assertIs(type(external.context_manager('')), FunctionType)
        self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
        self.assertTrue(external.context_manager('', fallback=True))
        with external.context_manager('')():
            self.assertTrue(True)
|
<commit_before>from django.test import TestCase
from ..externals import External
try:
    from contextlib import GeneratorContextManager
except ImportError:
    from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
    def test_load(self):
        external = External('foo')
        with self.assertRaises(ImportError):
            external._load('')
    def test_load_class(self):
        external = External('foo')
        self.assertIsInstance(external.load_class(''), object)
        self.assertTrue(external.load_class('', fallback=True))
    def test_load_method(self):
        external = External('foo')
        self.assertIsNone(external.load_method('')())
        self.assertTrue(external.load_method('', fallback=True))
    def test_context_manager(self):
        external = External('foo')
        self.assertIs(type(external.context_manager('')), FunctionType)
        self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
        self.assertTrue(external.context_manager('', fallback=True))
<commit_msg>Improve test coverage of externals.<commit_after>from django.test import TestCase
from ..externals import External
try:
    from contextlib import GeneratorContextManager
except ImportError:
    from contextlib import _GeneratorContextManager as GeneratorContextManager
from types import FunctionType
class TestExternals(TestCase):
    def test_load(self):
        external = External('foo')
        with self.assertRaises(ImportError):
            external._load('')
    def test_load_class(self):
        external = External('foo')
        self.assertIsInstance(external.load_class(''), object)
        self.assertTrue(external.load_class('', fallback=True))
    def test_load_method(self):
        external = External('foo')
        self.assertIsNone(external.load_method('')())
        self.assertTrue(external.load_method('', fallback=True))
    def test_context_manager(self):
        external = External('foo')
        self.assertIs(type(external.context_manager('')), FunctionType)
        self.assertIsInstance(external.context_manager('')(), GeneratorContextManager)
        self.assertTrue(external.context_manager('', fallback=True))
        with external.context_manager('')():
            self.assertTrue(True)
|
67eeb5c7638b15a7f01f60260408fd3aefad549a
|
meinberlin/apps/offlineevents/templatetags/offlineevent_tags.py
|
meinberlin/apps/offlineevents/templatetags/offlineevent_tags.py
|
from functools import cmp_to_key
from django import template
from adhocracy4.modules.models import Module
from adhocracy4.phases.models import Phase
from meinberlin.apps.offlineevents.models import OfflineEvent
register = template.Library()
@register.assignment_tag
def offlineevents_and_modules_sorted(project):
    modules = list(project.module_set.all())
    events = list(OfflineEvent.objects.filter(project=project))
    res = modules + events
    return sorted(res, key=cmp_to_key(_cmp))
def _cmp(x, y):
    x = x.first_phase_start_date if isinstance(x, Module) else x.date
    if x is None:
        return 1
    y = y.first_phase_start_date if isinstance(y, Module) else y.date
    if y is None:
        return -1
    return (x > y) - (y < x)
@register.filter
def is_phase(obj):
    return isinstance(obj, Phase)
@register.filter
def is_module(obj):
    return isinstance(obj, Module)
@register.filter
def is_offlineevent(obj):
    return isinstance(obj, OfflineEvent)
|
from functools import cmp_to_key
from django import template
from adhocracy4.modules.models import Module
from adhocracy4.phases.models import Phase
from meinberlin.apps.offlineevents.models import OfflineEvent
register = template.Library()
@register.assignment_tag
def offlineevents_and_modules_sorted(project):
    modules = list(project.module_set.all())
    events = list(OfflineEvent.objects.filter(project=project))
    res = modules + events
    return sorted(res, key=cmp_to_key(_cmp))
def _cmp(x, y):
    x_date = x.first_phase_start_date if isinstance(x, Module) else x.date
    if x_date is None:
        return 1
    y_date = y.first_phase_start_date if isinstance(y, Module) else y.date
    if y_date is None:
        return -1
    if x_date > y_date:
        return 1
    elif x_date == y_date:
        return 0
    else:
        return -1
@register.filter
def is_phase(obj):
    return isinstance(obj, Phase)
@register.filter
def is_module(obj):
    return isinstance(obj, Module)
@register.filter
def is_offlineevent(obj):
    return isinstance(obj, OfflineEvent)
|
Make comparison of dates more explicit
|
Make comparison of dates more explicit
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
from functools import cmp_to_key
from django import template
from adhocracy4.modules.models import Module
from adhocracy4.phases.models import Phase
from meinberlin.apps.offlineevents.models import OfflineEvent
register = template.Library()
@register.assignment_tag
def offlineevents_and_modules_sorted(project):
    modules = list(project.module_set.all())
    events = list(OfflineEvent.objects.filter(project=project))
    res = modules + events
    return sorted(res, key=cmp_to_key(_cmp))
def _cmp(x, y):
    x = x.first_phase_start_date if isinstance(x, Module) else x.date
    if x is None:
        return 1
    y = y.first_phase_start_date if isinstance(y, Module) else y.date
    if y is None:
        return -1
    return (x > y) - (y < x)
@register.filter
def is_phase(obj):
    return isinstance(obj, Phase)
@register.filter
def is_module(obj):
    return isinstance(obj, Module)
@register.filter
def is_offlineevent(obj):
    return isinstance(obj, OfflineEvent)
Make comparison of dates more explicit
|
from functools import cmp_to_key
from django import template
from adhocracy4.modules.models import Module
from adhocracy4.phases.models import Phase
from meinberlin.apps.offlineevents.models import OfflineEvent
register = template.Library()
@register.assignment_tag
def offlineevents_and_modules_sorted(project):
    modules = list(project.module_set.all())
    events = list(OfflineEvent.objects.filter(project=project))
    res = modules + events
    return sorted(res, key=cmp_to_key(_cmp))
def _cmp(x, y):
    x_date = x.first_phase_start_date if isinstance(x, Module) else x.date
    if x_date is None:
        return 1
    y_date = y.first_phase_start_date if isinstance(y, Module) else y.date
    if y_date is None:
        return -1
    if x_date > y_date:
        return 1
    elif x_date == y_date:
        return 0
    else:
        return -1
@register.filter
def is_phase(obj):
    return isinstance(obj, Phase)
@register.filter
def is_module(obj):
    return isinstance(obj, Module)
@register.filter
def is_offlineevent(obj):
    return isinstance(obj, OfflineEvent)
|
<commit_before>from functools import cmp_to_key
from django import template
from adhocracy4.modules.models import Module
from adhocracy4.phases.models import Phase
from meinberlin.apps.offlineevents.models import OfflineEvent
register = template.Library()
@register.assignment_tag
def offlineevents_and_modules_sorted(project):
    modules = list(project.module_set.all())
    events = list(OfflineEvent.objects.filter(project=project))
    res = modules + events
    return sorted(res, key=cmp_to_key(_cmp))
def _cmp(x, y):
    x = x.first_phase_start_date if isinstance(x, Module) else x.date
    if x is None:
        return 1
    y = y.first_phase_start_date if isinstance(y, Module) else y.date
    if y is None:
        return -1
    return (x > y) - (y < x)
@register.filter
def is_phase(obj):
    return isinstance(obj, Phase)
@register.filter
def is_module(obj):
    return isinstance(obj, Module)
@register.filter
def is_offlineevent(obj):
    return isinstance(obj, OfflineEvent)
<commit_msg>Make comparison of dates more explicit<commit_after>
|
from functools import cmp_to_key
from django import template
from adhocracy4.modules.models import Module
from adhocracy4.phases.models import Phase
from meinberlin.apps.offlineevents.models import OfflineEvent
register = template.Library()
@register.assignment_tag
def offlineevents_and_modules_sorted(project):
    modules = list(project.module_set.all())
    events = list(OfflineEvent.objects.filter(project=project))
    res = modules + events
    return sorted(res, key=cmp_to_key(_cmp))
def _cmp(x, y):
    x_date = x.first_phase_start_date if isinstance(x, Module) else x.date
    if x_date is None:
        return 1
    y_date = y.first_phase_start_date if isinstance(y, Module) else y.date
    if y_date is None:
        return -1
    if x_date > y_date:
        return 1
    elif x_date == y_date:
        return 0
    else:
        return -1
@register.filter
def is_phase(obj):
    return isinstance(obj, Phase)
@register.filter
def is_module(obj):
    return isinstance(obj, Module)
@register.filter
def is_offlineevent(obj):
    return isinstance(obj, OfflineEvent)
|
from functools import cmp_to_key
from django import template
from adhocracy4.modules.models import Module
from adhocracy4.phases.models import Phase
from meinberlin.apps.offlineevents.models import OfflineEvent
register = template.Library()
@register.assignment_tag
def offlineevents_and_modules_sorted(project):
    modules = list(project.module_set.all())
    events = list(OfflineEvent.objects.filter(project=project))
    res = modules + events
    return sorted(res, key=cmp_to_key(_cmp))
def _cmp(x, y):
    x = x.first_phase_start_date if isinstance(x, Module) else x.date
    if x is None:
        return 1
    y = y.first_phase_start_date if isinstance(y, Module) else y.date
    if y is None:
        return -1
    return (x > y) - (y < x)
@register.filter
def is_phase(obj):
    return isinstance(obj, Phase)
@register.filter
def is_module(obj):
    return isinstance(obj, Module)
@register.filter
def is_offlineevent(obj):
    return isinstance(obj, OfflineEvent)
Make comparison of dates more explicitfrom functools import cmp_to_key
from django import template
from adhocracy4.modules.models import Module
from adhocracy4.phases.models import Phase
from meinberlin.apps.offlineevents.models import OfflineEvent
register = template.Library()
@register.assignment_tag
def offlineevents_and_modules_sorted(project):
    modules = list(project.module_set.all())
    events = list(OfflineEvent.objects.filter(project=project))
    res = modules + events
    return sorted(res, key=cmp_to_key(_cmp))
def _cmp(x, y):
    x_date = x.first_phase_start_date if isinstance(x, Module) else x.date
    if x_date is None:
        return 1
    y_date = y.first_phase_start_date if isinstance(y, Module) else y.date
    if y_date is None:
        return -1
    if x_date > y_date:
        return 1
    elif x_date == y_date:
        return 0
    else:
        return -1
@register.filter
def is_phase(obj):
    return isinstance(obj, Phase)
@register.filter
def is_module(obj):
    return isinstance(obj, Module)
@register.filter
def is_offlineevent(obj):
    return isinstance(obj, OfflineEvent)
|
<commit_before>from functools import cmp_to_key
from django import template
from adhocracy4.modules.models import Module
from adhocracy4.phases.models import Phase
from meinberlin.apps.offlineevents.models import OfflineEvent
register = template.Library()
@register.assignment_tag
def offlineevents_and_modules_sorted(project):
    modules = list(project.module_set.all())
    events = list(OfflineEvent.objects.filter(project=project))
    res = modules + events
    return sorted(res, key=cmp_to_key(_cmp))
def _cmp(x, y):
    x = x.first_phase_start_date if isinstance(x, Module) else x.date
    if x is None:
        return 1
    y = y.first_phase_start_date if isinstance(y, Module) else y.date
    if y is None:
        return -1
    return (x > y) - (y < x)
@register.filter
def is_phase(obj):
    return isinstance(obj, Phase)
@register.filter
def is_module(obj):
    return isinstance(obj, Module)
@register.filter
def is_offlineevent(obj):
    return isinstance(obj, OfflineEvent)
<commit_msg>Make comparison of dates more explicit<commit_after>from functools import cmp_to_key
from django import template
from adhocracy4.modules.models import Module
from adhocracy4.phases.models import Phase
from meinberlin.apps.offlineevents.models import OfflineEvent
register = template.Library()
@register.assignment_tag
def offlineevents_and_modules_sorted(project):
    modules = list(project.module_set.all())
    events = list(OfflineEvent.objects.filter(project=project))
    res = modules + events
    return sorted(res, key=cmp_to_key(_cmp))
def _cmp(x, y):
    x_date = x.first_phase_start_date if isinstance(x, Module) else x.date
    if x_date is None:
        return 1
    y_date = y.first_phase_start_date if isinstance(y, Module) else y.date
    if y_date is None:
        return -1
    if x_date > y_date:
        return 1
    elif x_date == y_date:
        return 0
    else:
        return -1
@register.filter
def is_phase(obj):
    return isinstance(obj, Phase)
@register.filter
def is_module(obj):
    return isinstance(obj, Module)
@register.filter
def is_offlineevent(obj):
    return isinstance(obj, OfflineEvent)
|
dc04ba245522fe5b9376aa30621bffd8c02b600a
|
huxley/__init__.py
|
huxley/__init__.py
|
# Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
See main.py.
"""
from huxley.integration import HuxleyTestCase, unittest_main
__all__ = ['HuxleyTestCase', 'unittest_main', ]
|
# Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
See main.py.
"""
from huxley.integration import HuxleyTestCase, unittest_main
from huxley.version import __version__
__all__ = ['HuxleyTestCase', 'unittest_main', ]
|
Fix __version__ missing for setuptools
|
Fix __version__ missing for setuptools
|
Python
|
apache-2.0
|
ijl/gossamer,ijl/gossamer
|
# Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
See main.py.
"""
from huxley.integration import HuxleyTestCase, unittest_main
__all__ = ['HuxleyTestCase', 'unittest_main', ]
Fix __version__ missing for setuptools
|
# Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
See main.py.
"""
from huxley.integration import HuxleyTestCase, unittest_main
from huxley.version import __version__
__all__ = ['HuxleyTestCase', 'unittest_main', ]
|
<commit_before># Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
See main.py.
"""
from huxley.integration import HuxleyTestCase, unittest_main
__all__ = ['HuxleyTestCase', 'unittest_main', ]
<commit_msg>Fix __version__ missing for setuptools<commit_after>
|
# Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
See main.py.
"""
from huxley.integration import HuxleyTestCase, unittest_main
from huxley.version import __version__
__all__ = ['HuxleyTestCase', 'unittest_main', ]
|
# Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
See main.py.
"""
from huxley.integration import HuxleyTestCase, unittest_main
__all__ = ['HuxleyTestCase', 'unittest_main', ]
Fix __version__ missing for setuptools# Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
See main.py.
"""
from huxley.integration import HuxleyTestCase, unittest_main
from huxley.version import __version__
__all__ = ['HuxleyTestCase', 'unittest_main', ]
|
<commit_before># Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
See main.py.
"""
from huxley.integration import HuxleyTestCase, unittest_main
__all__ = ['HuxleyTestCase', 'unittest_main', ]
<commit_msg>Fix __version__ missing for setuptools<commit_after># Copyright (c) 2013 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
See main.py.
"""
from huxley.integration import HuxleyTestCase, unittest_main
from huxley.version import __version__
__all__ = ['HuxleyTestCase', 'unittest_main', ]
|
336d778a49e0e09996ea647b8a1c3c9e414dd313
|
jenkins/test/validators/common.py
|
jenkins/test/validators/common.py
|
''' Provide common utils to validators '''
import subprocess
import sys
# Run cli command. By default, exit when an error occurs
def run_cli_cmd(cmd, exit_on_fail=True):
    '''Run a command and return its output'''
    print "Running system command: " + cmd
    proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        if exit_on_fail:
            print stdout
            print "Unable to run " + " ".join(cmd) + " due to error: " + stderr
            sys.exit(proc.returncode)
        else:
            return False, stdout
    else:
        return True, stdout
|
''' Provide common utils to validators '''
import subprocess
import sys
# Run cli command. By default, exit when an error occurs
def run_cli_cmd(cmd, exit_on_fail=True):
    '''Run a command and return its output'''
    print "Running system command: " + " ".join(cmd)
    proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        if exit_on_fail:
            print stdout
            print "Unable to run " + " ".join(cmd) + " due to error: " + stderr
            sys.exit(proc.returncode)
        else:
            return False, stdout
    else:
        return True, stdout
|
Fix logging for system commands in CI
|
Fix logging for system commands in CI
|
Python
|
apache-2.0
|
tiwillia/openshift-tools,joelsmith/openshift-tools,tiwillia/openshift-tools,openshift/openshift-tools,joelddiaz/openshift-tools,joelddiaz/openshift-tools,twiest/openshift-tools,joelsmith/openshift-tools,blrm/openshift-tools,ivanhorvath/openshift-tools,rhdedgar/openshift-tools,rhdedgar/openshift-tools,drewandersonnz/openshift-tools,rhdedgar/openshift-tools,rhdedgar/openshift-tools,twiest/openshift-tools,openshift/openshift-tools,blrm/openshift-tools,tiwillia/openshift-tools,drewandersonnz/openshift-tools,openshift/openshift-tools,joelsmith/openshift-tools,joelddiaz/openshift-tools,rhdedgar/openshift-tools,twiest/openshift-tools,drewandersonnz/openshift-tools,twiest/openshift-tools,drewandersonnz/openshift-tools,twiest/openshift-tools,drewandersonnz/openshift-tools,blrm/openshift-tools,ivanhorvath/openshift-tools,blrm/openshift-tools,blrm/openshift-tools,openshift/openshift-tools,tiwillia/openshift-tools,drewandersonnz/openshift-tools,joelddiaz/openshift-tools,ivanhorvath/openshift-tools,ivanhorvath/openshift-tools,openshift/openshift-tools,ivanhorvath/openshift-tools,tiwillia/openshift-tools,twiest/openshift-tools,joelsmith/openshift-tools,openshift/openshift-tools,joelddiaz/openshift-tools,joelddiaz/openshift-tools,blrm/openshift-tools,ivanhorvath/openshift-tools
|
''' Provide common utils to validators '''
import subprocess
import sys
# Run cli command. By default, exit when an error occurs
def run_cli_cmd(cmd, exit_on_fail=True):
    '''Run a command and return its output'''
    print "Running system command: " + cmd
    proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        if exit_on_fail:
            print stdout
            print "Unable to run " + " ".join(cmd) + " due to error: " + stderr
            sys.exit(proc.returncode)
        else:
            return False, stdout
    else:
        return True, stdout
Fix logging for system commands in CI
|
''' Provide common utils to validators '''
import subprocess
import sys
# Run cli command. By default, exit when an error occurs
def run_cli_cmd(cmd, exit_on_fail=True):
    '''Run a command and return its output'''
    print "Running system command: " + " ".join(cmd)
    proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        if exit_on_fail:
            print stdout
            print "Unable to run " + " ".join(cmd) + " due to error: " + stderr
            sys.exit(proc.returncode)
        else:
            return False, stdout
    else:
        return True, stdout
|
<commit_before>''' Provide common utils to validators '''
import subprocess
import sys
# Run cli command. By default, exit when an error occurs
def run_cli_cmd(cmd, exit_on_fail=True):
    '''Run a command and return its output'''
    print "Running system command: " + cmd
    proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        if exit_on_fail:
            print stdout
            print "Unable to run " + " ".join(cmd) + " due to error: " + stderr
            sys.exit(proc.returncode)
        else:
            return False, stdout
    else:
        return True, stdout
<commit_msg>Fix logging for system commands in CI<commit_after>
|
''' Provide common utils to validators '''
import subprocess
import sys
# Run cli command. By default, exit when an error occurs
def run_cli_cmd(cmd, exit_on_fail=True):
    '''Run a command and return its output'''
    print "Running system command: " + " ".join(cmd)
    proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        if exit_on_fail:
            print stdout
            print "Unable to run " + " ".join(cmd) + " due to error: " + stderr
            sys.exit(proc.returncode)
        else:
            return False, stdout
    else:
        return True, stdout
|
''' Provide common utils to validators '''
import subprocess
import sys
# Run cli command. By default, exit when an error occurs
def run_cli_cmd(cmd, exit_on_fail=True):
    '''Run a command and return its output'''
    print "Running system command: " + cmd
    proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        if exit_on_fail:
            print stdout
            print "Unable to run " + " ".join(cmd) + " due to error: " + stderr
            sys.exit(proc.returncode)
        else:
            return False, stdout
    else:
        return True, stdout
Fix logging for system commands in CI''' Provide common utils to validators '''
import subprocess
import sys
# Run cli command. By default, exit when an error occurs
def run_cli_cmd(cmd, exit_on_fail=True):
    '''Run a command and return its output'''
    print "Running system command: " + " ".join(cmd)
    proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        if exit_on_fail:
            print stdout
            print "Unable to run " + " ".join(cmd) + " due to error: " + stderr
            sys.exit(proc.returncode)
        else:
            return False, stdout
    else:
        return True, stdout
|
<commit_before>''' Provide common utils to validators '''
import subprocess
import sys
# Run cli command. By default, exit when an error occurs
def run_cli_cmd(cmd, exit_on_fail=True):
    '''Run a command and return its output'''
    print "Running system command: " + cmd
    proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        if exit_on_fail:
            print stdout
            print "Unable to run " + " ".join(cmd) + " due to error: " + stderr
            sys.exit(proc.returncode)
        else:
            return False, stdout
    else:
        return True, stdout
<commit_msg>Fix logging for system commands in CI<commit_after>''' Provide common utils to validators '''
import subprocess
import sys
# Run cli command. By default, exit when an error occurs
def run_cli_cmd(cmd, exit_on_fail=True):
    '''Run a command and return its output'''
    print "Running system command: " + " ".join(cmd)
    proc = subprocess.Popen(cmd, bufsize=-1, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=False)
    stdout, stderr = proc.communicate()
    if proc.returncode != 0:
        if exit_on_fail:
            print stdout
            print "Unable to run " + " ".join(cmd) + " due to error: " + stderr
            sys.exit(proc.returncode)
        else:
            return False, stdout
    else:
        return True, stdout
|
111019266e15f59a358c0842815cd7368d89982f
|
rbm2m/views/public.py
|
rbm2m/views/public.py
|
# -*- coding: utf-8 -*-
import logging
from flask import Blueprint, render_template, request, send_from_directory, current_app
from ..webapp import db
from ..action import export_manager, genre_manager, exporter
bp = Blueprint('public', __name__)
logger = logging.getLogger(__name__)
@bp.route('/yml')
def yml():
    """
    YML export endpoint
    """
    exp = exporter.YMLExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'generation_date': exp.generation_date(),
        'genres': exp.category_list(),
        'offers': exp.offers()
    }
    return render_template('yml.xml', **ctx)
@bp.route('/table')
def table():
    """
    Table export endpoint
    """
    exp = exporter.TableExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'genres': exp.category_list(),
        'rows': exp.rows()
    }
    return render_template('table.html', **ctx)
@bp.route('/media/<path:path>')
def serve_media(path):
    return send_from_directory(current_app.config['MEDIA_DIR'], path)
|
# -*- coding: utf-8 -*-
import logging
from flask import Blueprint, render_template, request, send_from_directory, current_app
from ..webapp import db
from ..action import export_manager, genre_manager, exporter
bp = Blueprint('public', __name__)
logger = logging.getLogger(__name__)
@bp.route('/yml')
def yml():
    """
    YML export endpoint
    """
    exp = exporter.YMLExporter(db.session)
    exp.log_export(client_ip(), request.user_agent)
    ctx = {
        'generation_date': exp.generation_date(),
        'genres': exp.category_list(),
        'offers': exp.offers()
    }
    return render_template('yml.xml', **ctx)
@bp.route('/table')
def table():
    """
    Table export endpoint
    """
    exp = exporter.TableExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'genres': exp.category_list(),
        'rows': exp.rows()
    }
    return render_template('table.html', **ctx)
@bp.route('/media/<path:path>')
def serve_media(path):
    return send_from_directory(current_app.config['MEDIA_DIR'], path)
def client_ip():
    """
    Returns client ip address
    """
    if 'HTTP_X_REAL_IP' in request.environ:
        return request.environ['HTTP_X_REAL_IP']
    return request.remote_addr
|
Use HTTP_X_REAL_IP to determine client ip address
|
Use HTTP_X_REAL_IP to determine client ip address
|
Python
|
apache-2.0
|
notapresent/rbm2m,notapresent/rbm2m
|
# -*- coding: utf-8 -*-
import logging
from flask import Blueprint, render_template, request, send_from_directory, current_app
from ..webapp import db
from ..action import export_manager, genre_manager, exporter
bp = Blueprint('public', __name__)
logger = logging.getLogger(__name__)
@bp.route('/yml')
def yml():
    """
    YML export endpoint
    """
    exp = exporter.YMLExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'generation_date': exp.generation_date(),
        'genres': exp.category_list(),
        'offers': exp.offers()
    }
    return render_template('yml.xml', **ctx)
@bp.route('/table')
def table():
    """
    Table export endpoint
    """
    exp = exporter.TableExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'genres': exp.category_list(),
        'rows': exp.rows()
    }
    return render_template('table.html', **ctx)
@bp.route('/media/<path:path>')
def serve_media(path):
    return send_from_directory(current_app.config['MEDIA_DIR'], path)
Use HTTP_X_REAL_IP to determine client ip address
|
# -*- coding: utf-8 -*-
import logging
from flask import Blueprint, render_template, request, send_from_directory, current_app
from ..webapp import db
from ..action import export_manager, genre_manager, exporter
bp = Blueprint('public', __name__)
logger = logging.getLogger(__name__)
@bp.route('/yml')
def yml():
    """
    YML export endpoint
    """
    exp = exporter.YMLExporter(db.session)
    exp.log_export(client_ip(), request.user_agent)
    ctx = {
        'generation_date': exp.generation_date(),
        'genres': exp.category_list(),
        'offers': exp.offers()
    }
    return render_template('yml.xml', **ctx)
@bp.route('/table')
def table():
    """
    Table export endpoint
    """
    exp = exporter.TableExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'genres': exp.category_list(),
        'rows': exp.rows()
    }
    return render_template('table.html', **ctx)
@bp.route('/media/<path:path>')
def serve_media(path):
    return send_from_directory(current_app.config['MEDIA_DIR'], path)
def client_ip():
    """
    Returns client ip address
    """
    if 'HTTP_X_REAL_IP' in request.environ:
        return request.environ['HTTP_X_REAL_IP']
    return request.remote_addr
|
<commit_before># -*- coding: utf-8 -*-
import logging
from flask import Blueprint, render_template, request, send_from_directory, current_app
from ..webapp import db
from ..action import export_manager, genre_manager, exporter
bp = Blueprint('public', __name__)
logger = logging.getLogger(__name__)
@bp.route('/yml')
def yml():
    """
    YML export endpoint
    """
    exp = exporter.YMLExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'generation_date': exp.generation_date(),
        'genres': exp.category_list(),
        'offers': exp.offers()
    }
    return render_template('yml.xml', **ctx)
@bp.route('/table')
def table():
    """
    Table export endpoint
    """
    exp = exporter.TableExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'genres': exp.category_list(),
        'rows': exp.rows()
    }
    return render_template('table.html', **ctx)
@bp.route('/media/<path:path>')
def serve_media(path):
    return send_from_directory(current_app.config['MEDIA_DIR'], path)
<commit_msg>Use HTTP_X_REAL_IP to determine client ip address<commit_after>
|
# -*- coding: utf-8 -*-
import logging
from flask import Blueprint, render_template, request, send_from_directory, current_app
from ..webapp import db
from ..action import export_manager, genre_manager, exporter
bp = Blueprint('public', __name__)
logger = logging.getLogger(__name__)
@bp.route('/yml')
def yml():
    """
    YML export endpoint
    """
    exp = exporter.YMLExporter(db.session)
    exp.log_export(client_ip(), request.user_agent)
    ctx = {
        'generation_date': exp.generation_date(),
        'genres': exp.category_list(),
        'offers': exp.offers()
    }
    return render_template('yml.xml', **ctx)
@bp.route('/table')
def table():
    """
    Table export endpoint
    """
    exp = exporter.TableExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'genres': exp.category_list(),
        'rows': exp.rows()
    }
    return render_template('table.html', **ctx)
@bp.route('/media/<path:path>')
def serve_media(path):
    return send_from_directory(current_app.config['MEDIA_DIR'], path)
def client_ip():
    """
    Returns client ip address
    """
    if 'HTTP_X_REAL_IP' in request.environ:
        return request.environ['HTTP_X_REAL_IP']
    return request.remote_addr
|
# -*- coding: utf-8 -*-
import logging
from flask import Blueprint, render_template, request, send_from_directory, current_app
from ..webapp import db
from ..action import export_manager, genre_manager, exporter
bp = Blueprint('public', __name__)
logger = logging.getLogger(__name__)
@bp.route('/yml')
def yml():
    """
    YML export endpoint
    """
    exp = exporter.YMLExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'generation_date': exp.generation_date(),
        'genres': exp.category_list(),
        'offers': exp.offers()
    }
    return render_template('yml.xml', **ctx)
@bp.route('/table')
def table():
    """
    Table export endpoint
    """
    exp = exporter.TableExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'genres': exp.category_list(),
        'rows': exp.rows()
    }
    return render_template('table.html', **ctx)
@bp.route('/media/<path:path>')
def serve_media(path):
    return send_from_directory(current_app.config['MEDIA_DIR'], path)
Use HTTP_X_REAL_IP to determine client ip address# -*- coding: utf-8 -*-
import logging
from flask import Blueprint, render_template, request, send_from_directory, current_app
from ..webapp import db
from ..action import export_manager, genre_manager, exporter
bp = Blueprint('public', __name__)
logger = logging.getLogger(__name__)
@bp.route('/yml')
def yml():
    """
    YML export endpoint
    """
    exp = exporter.YMLExporter(db.session)
    exp.log_export(client_ip(), request.user_agent)
    ctx = {
        'generation_date': exp.generation_date(),
        'genres': exp.category_list(),
        'offers': exp.offers()
    }
    return render_template('yml.xml', **ctx)
@bp.route('/table')
def table():
    """
    Table export endpoint
    """
    exp = exporter.TableExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'genres': exp.category_list(),
        'rows': exp.rows()
    }
    return render_template('table.html', **ctx)
@bp.route('/media/<path:path>')
def serve_media(path):
    return send_from_directory(current_app.config['MEDIA_DIR'], path)
def client_ip():
    """
    Returns client ip address
    """
    if 'HTTP_X_REAL_IP' in request.environ:
        return request.environ['HTTP_X_REAL_IP']
    return request.remote_addr
|
<commit_before># -*- coding: utf-8 -*-
import logging
from flask import Blueprint, render_template, request, send_from_directory, current_app
from ..webapp import db
from ..action import export_manager, genre_manager, exporter
bp = Blueprint('public', __name__)
logger = logging.getLogger(__name__)
@bp.route('/yml')
def yml():
    """
    YML export endpoint
    """
    exp = exporter.YMLExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'generation_date': exp.generation_date(),
        'genres': exp.category_list(),
        'offers': exp.offers()
    }
    return render_template('yml.xml', **ctx)
@bp.route('/table')
def table():
    """
    Table export endpoint
    """
    exp = exporter.TableExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'genres': exp.category_list(),
        'rows': exp.rows()
    }
    return render_template('table.html', **ctx)
@bp.route('/media/<path:path>')
def serve_media(path):
    return send_from_directory(current_app.config['MEDIA_DIR'], path)
<commit_msg>Use HTTP_X_REAL_IP to determine client ip address<commit_after># -*- coding: utf-8 -*-
import logging
from flask import Blueprint, render_template, request, send_from_directory, current_app
from ..webapp import db
from ..action import export_manager, genre_manager, exporter
bp = Blueprint('public', __name__)
logger = logging.getLogger(__name__)
@bp.route('/yml')
def yml():
    """
    YML export endpoint
    """
    exp = exporter.YMLExporter(db.session)
    exp.log_export(client_ip(), request.user_agent)
    ctx = {
        'generation_date': exp.generation_date(),
        'genres': exp.category_list(),
        'offers': exp.offers()
    }
    return render_template('yml.xml', **ctx)
@bp.route('/table')
def table():
    """
    Table export endpoint
    """
    exp = exporter.TableExporter(db.session)
    exp.log_export(request.remote_addr, request.user_agent)
    ctx = {
        'genres': exp.category_list(),
        'rows': exp.rows()
    }
    return render_template('table.html', **ctx)
@bp.route('/media/<path:path>')
def serve_media(path):
    return send_from_directory(current_app.config['MEDIA_DIR'], path)
def client_ip():
    """
    Returns client ip address
    """
    if 'HTTP_X_REAL_IP' in request.environ:
        return request.environ['HTTP_X_REAL_IP']
    return request.remote_addr
|
30c97e8a377b40f42855d38167768f9eb8e374fc
|
base/views.py
|
base/views.py
|
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
    allowed_oses = OSForm.OS_CHOICES
    def get_context_data(self, **kwargs):
        """Inject current active OS key and the choice form into context.
        """
        # Zip the 2-tuple into a [keys, values] generator, and use next() to
        # get its first item (i.e. keys).
        allowed_os_keys = next(zip(*self.allowed_oses))
        os = self.request.session.get(SESSION_KEY_CURRENT_OS)
        if os not in allowed_os_keys:
            os = 'windows'
        os_form = OSForm(initial={'os': os})
        kwargs.update({'current_os': os, 'os_form': os_form})
        return super().get_context_data(**kwargs)
|
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
    allowed_oses = OSForm.OS_CHOICES
    def get_context_data(self, **kwargs):
        """Inject current active OS key and the choice form into context.
        """
        # Zip the 2-tuple into a [keys, values] generator, and use next() to
        # get its first item (i.e. keys).
        allowed_os_keys = next(zip(*self.allowed_oses))
        os = self.request.session.get(SESSION_KEY_CURRENT_OS)
        if os not in allowed_os_keys:
            os = OSForm.OS_CHOICES[0][0]
        os_form = OSForm(initial={'os': os})
        kwargs.update({'current_os': os, 'os_form': os_form})
        return super().get_context_data(**kwargs)
|
Use form variable instead hard-coding
|
Use form variable instead hard-coding
|
Python
|
mit
|
djangogirlstaipei/djangogirlstaipei,djangogirlstaipei/djangogirlstaipei,djangogirlstaipei/djangogirlstaipei
|
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = 'windows'
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
Use form variable instead hard-coding
|
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = OSForm.OS_CHOICES[0][0]
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
|
<commit_before>from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = 'windows'
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
<commit_msg>Use form variable instead hard-coding<commit_after>
|
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = OSForm.OS_CHOICES[0][0]
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
|
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = 'windows'
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
Use form variable instead hard-coding
from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = OSForm.OS_CHOICES[0][0]
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
|
<commit_before>from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = 'windows'
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
<commit_msg>Use form variable instead hard-coding<commit_after>from .utils import SESSION_KEY_CURRENT_OS
from .forms import OSForm
class CurrentOSMixin(object):
allowed_oses = OSForm.OS_CHOICES
def get_context_data(self, **kwargs):
"""Inject current active OS key and the choice form into context.
"""
# Zip the 2-tuple into a [keys, values] generator, and use next() to
# get its first item (i.e. keys).
allowed_os_keys = next(zip(*self.allowed_oses))
os = self.request.session.get(SESSION_KEY_CURRENT_OS)
if os not in allowed_os_keys:
os = OSForm.OS_CHOICES[0][0]
os_form = OSForm(initial={'os': os})
kwargs.update({'current_os': os, 'os_form': os_form})
return super().get_context_data(**kwargs)
|
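The inline comment in CurrentOSMixin explains the zip()/next() idiom used to pull the key column out of a Django-style choices tuple, and the fix falls back to the first declared choice instead of a hard-coded key. A small stand-alone illustration of both points; the OS_CHOICES values below are invented for the example and need not match OSForm.

OS_CHOICES = (
    ('windows', 'Windows'),
    ('osx', 'OS X'),
    ('linux', 'Linux'),
)

# zip(*choices) regroups the (key, label) pairs into a keys tuple and a
# labels tuple; next() takes the first group, i.e. the keys.
allowed_keys = next(zip(*OS_CHOICES))      # ('windows', 'osx', 'linux')

current = 'beos'                           # e.g. a stale session value
if current not in allowed_keys:
    current = OS_CHOICES[0][0]             # first declared choice, not a literal

print(current)                             # -> 'windows'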
621432d1bafe54220ad22afc285aae1c71de0875
|
custom/icds_reports/tasks.py
|
custom/icds_reports/tasks.py
|
import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db import connections
@periodic_task(run_every=crontab(minute=0, hour=0), acks_late=True)
def move_ucr_data_into_aggregation_tables():
if hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and settings.ICDS_UCR_DATABASE_ALIAS:
with connections[settings.ICDS_UCR_DATABASE_ALIAS].cursor() as cursor:
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_locations_table.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
cursor.execute(sql_to_execute)
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_monthly_aggregate_tables.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
for interval in ["0 months", "1 months", "2 months"]:
cursor.execute(sql_to_execute, {"interval": interval})
|
import logging
import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db import connections
celery_task_logger = logging.getLogger('celery.task')
@periodic_task(run_every=crontab(minute=0, hour=0), acks_late=True)
def move_ucr_data_into_aggregation_tables():
if hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and settings.ICDS_UCR_DATABASE_ALIAS:
with connections[settings.ICDS_UCR_DATABASE_ALIAS].cursor() as cursor:
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_locations_table.sql')
celery_task_logger.info("Starting icds reports update_location_tables")
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
cursor.execute(sql_to_execute)
celery_task_logger.info("Ended icds reports update_location_tables_sql")
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_monthly_aggregate_tables.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
for interval in ["0 months", "1 months", "2 months"]:
celery_task_logger.info(
"Starting icds reports {} update_monthly_aggregate_tables".format(interval)
)
cursor.execute(sql_to_execute, {"interval": interval})
celery_task_logger.info(
"Ended icds reports {} update_monthly_aggregate_tables".format(interval)
)
|
Add logging to icds reports task
|
Add logging to icds reports task
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db import connections
@periodic_task(run_every=crontab(minute=0, hour=0), acks_late=True)
def move_ucr_data_into_aggregation_tables():
if hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and settings.ICDS_UCR_DATABASE_ALIAS:
with connections[settings.ICDS_UCR_DATABASE_ALIAS].cursor() as cursor:
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_locations_table.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
cursor.execute(sql_to_execute)
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_monthly_aggregate_tables.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
for interval in ["0 months", "1 months", "2 months"]:
cursor.execute(sql_to_execute, {"interval": interval})
Add logging to icds reports task
|
import logging
import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db import connections
celery_task_logger = logging.getLogger('celery.task')
@periodic_task(run_every=crontab(minute=0, hour=0), acks_late=True)
def move_ucr_data_into_aggregation_tables():
if hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and settings.ICDS_UCR_DATABASE_ALIAS:
with connections[settings.ICDS_UCR_DATABASE_ALIAS].cursor() as cursor:
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_locations_table.sql')
celery_task_logger.info("Starting icds reports update_location_tables")
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
cursor.execute(sql_to_execute)
celery_task_logger.info("Ended icds reports update_location_tables_sql")
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_monthly_aggregate_tables.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
for interval in ["0 months", "1 months", "2 months"]:
celery_task_logger.info(
"Starting icds reports {} update_monthly_aggregate_tables".format(interval)
)
cursor.execute(sql_to_execute, {"interval": interval})
celery_task_logger.info(
"Ended icds reports {} update_monthly_aggregate_tables".format(interval)
)
|
<commit_before>import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db import connections
@periodic_task(run_every=crontab(minute=0, hour=0), acks_late=True)
def move_ucr_data_into_aggregation_tables():
if hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and settings.ICDS_UCR_DATABASE_ALIAS:
with connections[settings.ICDS_UCR_DATABASE_ALIAS].cursor() as cursor:
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_locations_table.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
cursor.execute(sql_to_execute)
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_monthly_aggregate_tables.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
for interval in ["0 months", "1 months", "2 months"]:
cursor.execute(sql_to_execute, {"interval": interval})
<commit_msg>Add logging to icds reports task<commit_after>
|
import logging
import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db import connections
celery_task_logger = logging.getLogger('celery.task')
@periodic_task(run_every=crontab(minute=0, hour=0), acks_late=True)
def move_ucr_data_into_aggregation_tables():
if hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and settings.ICDS_UCR_DATABASE_ALIAS:
with connections[settings.ICDS_UCR_DATABASE_ALIAS].cursor() as cursor:
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_locations_table.sql')
celery_task_logger.info("Starting icds reports update_location_tables")
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
cursor.execute(sql_to_execute)
celery_task_logger.info("Ended icds reports update_location_tables_sql")
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_monthly_aggregate_tables.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
for interval in ["0 months", "1 months", "2 months"]:
celery_task_logger.info(
"Starting icds reports {} update_monthly_aggregate_tables".format(interval)
)
cursor.execute(sql_to_execute, {"interval": interval})
celery_task_logger.info(
"Ended icds reports {} update_monthly_aggregate_tables".format(interval)
)
|
import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db import connections
@periodic_task(run_every=crontab(minute=0, hour=0), acks_late=True)
def move_ucr_data_into_aggregation_tables():
if hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and settings.ICDS_UCR_DATABASE_ALIAS:
with connections[settings.ICDS_UCR_DATABASE_ALIAS].cursor() as cursor:
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_locations_table.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
cursor.execute(sql_to_execute)
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_monthly_aggregate_tables.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
for interval in ["0 months", "1 months", "2 months"]:
cursor.execute(sql_to_execute, {"interval": interval})
Add logging to icds reports task
import logging
import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db import connections
celery_task_logger = logging.getLogger('celery.task')
@periodic_task(run_every=crontab(minute=0, hour=0), acks_late=True)
def move_ucr_data_into_aggregation_tables():
if hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and settings.ICDS_UCR_DATABASE_ALIAS:
with connections[settings.ICDS_UCR_DATABASE_ALIAS].cursor() as cursor:
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_locations_table.sql')
celery_task_logger.info("Starting icds reports update_location_tables")
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
cursor.execute(sql_to_execute)
celery_task_logger.info("Ended icds reports update_location_tables_sql")
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_monthly_aggregate_tables.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
for interval in ["0 months", "1 months", "2 months"]:
celery_task_logger.info(
"Starting icds reports {} update_monthly_aggregate_tables".format(interval)
)
cursor.execute(sql_to_execute, {"interval": interval})
celery_task_logger.info(
"Ended icds reports {} update_monthly_aggregate_tables".format(interval)
)
|
<commit_before>import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db import connections
@periodic_task(run_every=crontab(minute=0, hour=0), acks_late=True)
def move_ucr_data_into_aggregation_tables():
if hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and settings.ICDS_UCR_DATABASE_ALIAS:
with connections[settings.ICDS_UCR_DATABASE_ALIAS].cursor() as cursor:
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_locations_table.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
cursor.execute(sql_to_execute)
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_monthly_aggregate_tables.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
for interval in ["0 months", "1 months", "2 months"]:
cursor.execute(sql_to_execute, {"interval": interval})
<commit_msg>Add logging to icds reports task<commit_after>import logging
import os
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db import connections
celery_task_logger = logging.getLogger('celery.task')
@periodic_task(run_every=crontab(minute=0, hour=0), acks_late=True)
def move_ucr_data_into_aggregation_tables():
if hasattr(settings, "ICDS_UCR_DATABASE_ALIAS") and settings.ICDS_UCR_DATABASE_ALIAS:
with connections[settings.ICDS_UCR_DATABASE_ALIAS].cursor() as cursor:
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_locations_table.sql')
celery_task_logger.info("Starting icds reports update_location_tables")
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
cursor.execute(sql_to_execute)
celery_task_logger.info("Ended icds reports update_location_tables_sql")
path = os.path.join(os.path.dirname(__file__), 'sql_templates', 'update_monthly_aggregate_tables.sql')
with open(path, "r") as sql_file:
sql_to_execute = sql_file.read()
for interval in ["0 months", "1 months", "2 months"]:
celery_task_logger.info(
"Starting icds reports {} update_monthly_aggregate_tables".format(interval)
)
cursor.execute(sql_to_execute, {"interval": interval})
celery_task_logger.info(
"Ended icds reports {} update_monthly_aggregate_tables".format(interval)
)
|
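The task above brackets each long-running SQL step with paired "Starting ..." and "Ended ..." log lines. That pattern can be factored into a small context manager; the sketch below reuses the celery.task logger name from the record, but the helper itself is an illustration rather than part of the commit.

import logging
import time
from contextlib import contextmanager

celery_task_logger = logging.getLogger('celery.task')

@contextmanager
def logged_step(description):
    # Emit paired start/end messages around a block of work, plus a rough
    # elapsed-time figure for convenience.
    celery_task_logger.info("Starting %s", description)
    started = time.monotonic()
    try:
        yield
    finally:
        celery_task_logger.info("Ended %s (%.1fs)", description, time.monotonic() - started)

# Usage sketch (cursor and sql_to_execute come from the surrounding task):
# with logged_step("icds reports update_location_tables"):
#     cursor.execute(sql_to_execute)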
352ab7fa385835bd68b42eb60a5b149bcfb28865
|
pyblogit/posts.py
|
pyblogit/posts.py
|
"""
pyblogit.posts
~~~~~~~~~~~~~~
This module contains the data model to represent blog posts and methods
to manipulate it.
"""
class post(object):
def __init__(self, post_id, title, url, author, content, images, labels,
status):
self._post_id = post_id
self._title = title
self._url = url
self._author = author
self._content = content
self._images = images
self._labels = labels
@property
def post_id(self):
return self._post_id
@property
def title(self):
return self._title
@property
def url(self):
return self._url
@property
def author(self):
return self._author
@property
def content(self):
return self._content
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
|
"""
pyblogit.posts
~~~~~~~~~~~~~~
This module contains the data model to represent blog posts and methods
to manipulate it.
"""
class post(object):
"""The post data model"""
def __init__(self, post_id, title, url, author, content, images, labels,
status):
self._post_id = post_id
self._title = title
self._url = url
self._author = author
self._content = content
self._images = images
self._labels = labels
@property
def post_id(self):
return self._post_id
@property
def title(self):
return self._title
@property
def url(self):
return self._url
@property
def author(self):
return self._author
@property
def content(self):
return self._content
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
|
Add docstring to Post class
|
Add docstring to Post class
|
Python
|
mit
|
jamalmoir/pyblogit
|
"""
pyblogit.posts
~~~~~~~~~~~~~~
This module contains the data model to represent blog posts and methods
to manipulate it.
"""
class post(object):
def __init__(self, post_id, title, url, author, content, images, labels,
status):
self._post_id = post_id
self._title = title
self._url = url
self._author = author
self._content = content
self._images = images
self._labels = labels
@property
def post_id(self):
return self._post_id
@property
def title(self):
return self._title
@property
def url(self):
return self._url
@property
def author(self):
return self._author
@property
def content(self):
return self._content
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
Add docstring to Post class
|
"""
pyblogit.posts
~~~~~~~~~~~~~~
This module contains the data model to represent blog posts and methods
to manipulate it.
"""
class post(object):
"""The post data model"""
def __init__(self, post_id, title, url, author, content, images, labels,
status):
self._post_id = post_id
self._title = title
self._url = url
self._author = author
self._content = content
self._images = images
self._labels = labels
@property
def post_id(self):
return self._post_id
@property
def title(self):
return self._title
@property
def url(self):
return self._url
@property
def author(self):
return self._author
@property
def content(self):
return self._content
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
|
<commit_before>"""
pyblogit.posts
~~~~~~~~~~~~~~
This module contains the data model to represent blog posts and methods
to manipulate it.
"""
class post(object):
def __init__(self, post_id, title, url, author, content, images, labels,
status):
self._post_id = post_id
self._title = title
self._url = url
self._author = author
self._content = content
self._images = images
self._labels = labels
@property
def post_id(self):
return self._post_id
@property
def title(self):
return self._title
@property
def url(self):
return self._url
@property
def author(self):
return self._author
@property
def content(self):
return self._content
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
<commit_msg>Add docstring to Post class<commit_after>
|
"""
pyblogit.posts
~~~~~~~~~~~~~~
This module contains the data model to represent blog posts and methods
to manipulate it.
"""
class post(object):
"""The post data model"""
def __init__(self, post_id, title, url, author, content, images, labels,
status):
self._post_id = post_id
self._title = title
self._url = url
self._author = author
self._content = content
self._images = images
self._labels = labels
@property
def post_id(self):
return self._post_id
@property
def title(self):
return self._title
@property
def url(self):
return self._url
@property
def author(self):
return self._author
@property
def content(self):
return self._content
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
|
"""
pyblogit.posts
~~~~~~~~~~~~~~
This module contains the data model to represent blog posts and methods
to manipulate it.
"""
class post(object):
def __init__(self, post_id, title, url, author, content, images, labels,
status):
self._post_id = post_id
self._title = title
self._url = url
self._author = author
self._content = content
self._images = images
self._labels = labels
@property
def post_id(self):
return self._post_id
@property
def title(self):
return self._title
@property
def url(self):
return self._url
@property
def author(self):
return self._author
@property
def content(self):
return self._content
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
Add docstring to Post class
"""
pyblogit.posts
~~~~~~~~~~~~~~
This module contains the data model to represent blog posts and methods
to manipulate it.
"""
class post(object):
"""The post data model"""
def __init__(self, post_id, title, url, author, content, images, labels,
status):
self._post_id = post_id
self._title = title
self._url = url
self._author = author
self._content = content
self._images = images
self._labels = labels
@property
def post_id(self):
return self._post_id
@property
def title(self):
return self._title
@property
def url(self):
return self._url
@property
def author(self):
return self._author
@property
def content(self):
return self._content
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
|
<commit_before>"""
pyblogit.posts
~~~~~~~~~~~~~~
This module contains the data model to represent blog posts and methods
to manipulate it.
"""
class post(object):
def __init__(self, post_id, title, url, author, content, images, labels,
status):
self._post_id = post_id
self._title = title
self._url = url
self._author = author
self._content = content
self._images = images
self._labels = labels
@property
def post_id(self):
return self._post_id
@property
def title(self):
return self._title
@property
def url(self):
return self._url
@property
def author(self):
return self._author
@property
def content(self):
return self._content
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
<commit_msg>Add docstring to Post class<commit_after>"""
pyblogit.posts
~~~~~~~~~~~~~~
This module contains the data model to represent blog posts and methods
to manipulate it.
"""
class post(object):
"""The post data model"""
def __init__(self, post_id, title, url, author, content, images, labels,
status):
self._post_id = post_id
self._title = title
self._url = url
self._author = author
self._content = content
self._images = images
self._labels = labels
@property
def post_id(self):
return self._post_id
@property
def title(self):
return self._title
@property
def url(self):
return self._url
@property
def author(self):
return self._author
@property
def content(self):
return self._content
@property
def images(self):
return self._images
@property
def labels(self):
return self._labels
|
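The post class above hand-writes a read-only property for every field (and, as written, accepts a status argument that it never stores). For a record-like model of this shape, the standard library's dataclasses give a comparably immutable, documented surface with less code. The sketch below is an alternative for comparison only, not how pyblogit defines the class.

from dataclasses import dataclass, field
from typing import List

@dataclass(frozen=True)
class Post:
    """The post data model (immutable)."""
    post_id: str
    title: str
    url: str
    author: str
    content: str
    images: List[str] = field(default_factory=list)
    labels: List[str] = field(default_factory=list)
    status: str = 'draft'

p = Post('42', 'Hello', 'https://example.com/hello', 'me', '<p>hi</p>')
print(p.title)        # attribute access replaces the hand-written properties
# p.title = 'new'     # would raise FrozenInstanceError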
7d0300b8571fc0732818e194644a6a669c69ffeb
|
src/Spill/runtests.py
|
src/Spill/runtests.py
|
import subprocess
import unittest
import glob
def runTestcase(self, input_file, result_file):
spillProg = subprocess.Popen(['./spill', input_file],stdout=subprocess.PIPE)
idProg = subprocess.Popen(['./id', result_file],stdout=subprocess.PIPE)
with open(result_file) as results:
self.assertEqual(spillProg.stdout.read(), idProg.stdout.read())
class Tests(unittest.TestCase):
def testValidateAll(self):
all_inputs = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.L2f'))
all_results = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.sres'))
for i,r in zip(all_inputs, all_results):
print i
runTestcase(self, i, r)
if __name__ == '__main__':
unittest.main()
|
import subprocess
import unittest
import glob
def runTestcase(self, input_file, result_file):
spillProg = subprocess.Popen(['runghc', '-i../../src', 'Main.hs', input_file],stdout=subprocess.PIPE)
idProg = subprocess.Popen(['runghc', '-i../../src', 'Id.hs', result_file],stdout=subprocess.PIPE)
with open(result_file) as results:
self.assertEqual(spillProg.stdout.read(), idProg.stdout.read())
class Tests(unittest.TestCase):
def testValidateAll(self):
all_inputs = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.L2f'))
all_results = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.sres'))
for i,r in zip(all_inputs, all_results):
print i
runTestcase(self, i, r)
if __name__ == '__main__':
unittest.main()
|
Use runghc instead of executables
|
Use runghc instead of executables
|
Python
|
bsd-3-clause
|
mhuesch/scheme_compiler,mhuesch/scheme_compiler
|
import subprocess
import unittest
import glob
def runTestcase(self, input_file, result_file):
spillProg = subprocess.Popen(['./spill', input_file],stdout=subprocess.PIPE)
idProg = subprocess.Popen(['./id', result_file],stdout=subprocess.PIPE)
with open(result_file) as results:
self.assertEqual(spillProg.stdout.read(), idProg.stdout.read())
class Tests(unittest.TestCase):
def testValidateAll(self):
all_inputs = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.L2f'))
all_results = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.sres'))
for i,r in zip(all_inputs, all_results):
print i
runTestcase(self, i, r)
if __name__ == '__main__':
unittest.main()
Use runghc instead of executables
|
import subprocess
import unittest
import glob
def runTestcase(self, input_file, result_file):
spillProg = subprocess.Popen(['runghc', '-i../../src', 'Main.hs', input_file],stdout=subprocess.PIPE)
idProg = subprocess.Popen(['runghc', '-i../../src', 'Id.hs', result_file],stdout=subprocess.PIPE)
with open(result_file) as results:
self.assertEqual(spillProg.stdout.read(), idProg.stdout.read())
class Tests(unittest.TestCase):
def testValidateAll(self):
all_inputs = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.L2f'))
all_results = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.sres'))
for i,r in zip(all_inputs, all_results):
print i
runTestcase(self, i, r)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import subprocess
import unittest
import glob
def runTestcase(self, input_file, result_file):
spillProg = subprocess.Popen(['./spill', input_file],stdout=subprocess.PIPE)
idProg = subprocess.Popen(['./id', result_file],stdout=subprocess.PIPE)
with open(result_file) as results:
self.assertEqual(spillProg.stdout.read(), idProg.stdout.read())
class Tests(unittest.TestCase):
def testValidateAll(self):
all_inputs = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.L2f'))
all_results = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.sres'))
for i,r in zip(all_inputs, all_results):
print i
runTestcase(self, i, r)
if __name__ == '__main__':
unittest.main()
<commit_msg>Use runghc instead of executables<commit_after>
|
import subprocess
import unittest
import glob
def runTestcase(self, input_file, result_file):
spillProg = subprocess.Popen(['runghc', '-i../../src', 'Main.hs', input_file],stdout=subprocess.PIPE)
idProg = subprocess.Popen(['runghc', '-i../../src', 'Id.hs', result_file],stdout=subprocess.PIPE)
with open(result_file) as results:
self.assertEqual(spillProg.stdout.read(), idProg.stdout.read())
class Tests(unittest.TestCase):
def testValidateAll(self):
all_inputs = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.L2f'))
all_results = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.sres'))
for i,r in zip(all_inputs, all_results):
print i
runTestcase(self, i, r)
if __name__ == '__main__':
unittest.main()
|
import subprocess
import unittest
import glob
def runTestcase(self, input_file, result_file):
spillProg = subprocess.Popen(['./spill', input_file],stdout=subprocess.PIPE)
idProg = subprocess.Popen(['./id', result_file],stdout=subprocess.PIPE)
with open(result_file) as results:
self.assertEqual(spillProg.stdout.read(), idProg.stdout.read())
class Tests(unittest.TestCase):
def testValidateAll(self):
all_inputs = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.L2f'))
all_results = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.sres'))
for i,r in zip(all_inputs, all_results):
print i
runTestcase(self, i, r)
if __name__ == '__main__':
unittest.main()
Use runghc instead of executables
import subprocess
import unittest
import glob
def runTestcase(self, input_file, result_file):
spillProg = subprocess.Popen(['runghc', '-i../../src', 'Main.hs', input_file],stdout=subprocess.PIPE)
idProg = subprocess.Popen(['runghc', '-i../../src', 'Id.hs', result_file],stdout=subprocess.PIPE)
with open(result_file) as results:
self.assertEqual(spillProg.stdout.read(), idProg.stdout.read())
class Tests(unittest.TestCase):
def testValidateAll(self):
all_inputs = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.L2f'))
all_results = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.sres'))
for i,r in zip(all_inputs, all_results):
print i
runTestcase(self, i, r)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import subprocess
import unittest
import glob
def runTestcase(self, input_file, result_file):
spillProg = subprocess.Popen(['./spill', input_file],stdout=subprocess.PIPE)
idProg = subprocess.Popen(['./id', result_file],stdout=subprocess.PIPE)
with open(result_file) as results:
self.assertEqual(spillProg.stdout.read(), idProg.stdout.read())
class Tests(unittest.TestCase):
def testValidateAll(self):
all_inputs = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.L2f'))
all_results = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.sres'))
for i,r in zip(all_inputs, all_results):
print i
runTestcase(self, i, r)
if __name__ == '__main__':
unittest.main()
<commit_msg>Use runghc instead of executables<commit_after>import subprocess
import unittest
import glob
def runTestcase(self, input_file, result_file):
spillProg = subprocess.Popen(['runghc', '-i../../src', 'Main.hs', input_file],stdout=subprocess.PIPE)
idProg = subprocess.Popen(['runghc', '-i../../src', 'Id.hs', result_file],stdout=subprocess.PIPE)
with open(result_file) as results:
self.assertEqual(spillProg.stdout.read(), idProg.stdout.read())
class Tests(unittest.TestCase):
def testValidateAll(self):
all_inputs = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.L2f'))
all_results = sorted(glob.glob('../../322-interps/tests/mhuesch/spill-test/*.sres'))
for i,r in zip(all_inputs, all_results):
print i
runTestcase(self, i, r)
if __name__ == '__main__':
unittest.main()
|
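The test driver above switches from compiled executables to runghc while still comparing raw Popen stdout. On Python 3 the same comparison is often written with subprocess.run, which also surfaces a non-zero exit instead of silently comparing empty output. The sketch below keeps the runghc arguments from the record, but the harness itself is illustrative rather than the project's actual script.

import subprocess

def run_and_capture(args):
    # check=True raises on a non-zero exit; capture_output/text collect
    # decoded stdout in one call (Python 3.7+).
    return subprocess.run(args, check=True, capture_output=True, text=True).stdout

def assert_same_output(input_file, result_file):
    spilled = run_and_capture(['runghc', '-i../../src', 'Main.hs', input_file])
    expected = run_and_capture(['runghc', '-i../../src', 'Id.hs', result_file])
    assert spilled == expected, 'spill output differs from reference output'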
c47df6cf4533676c33ca3466cb269657df3e228f
|
intexration/__main__.py
|
intexration/__main__.py
|
import argparse
import logging.config
import os
from intexration import settings
from intexration.server import Server
# Logger
logging.config.fileConfig(settings.LOGGING_FILE)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-host', help='Change the hostname')
parser.add_argument('-port', help='Change the port')
args = parser.parse_args()
if args.host is not None:
settings.set_config('host', args.host)
logging.INFO("Host changed to %s", args.host)
if args.port is not None:
settings.set_config('port', args.port)
logging.INFO("Port changed to %s", args.port)
if not settings.all_files_exist():
raise RuntimeError("Some necessary files were missing. Please consult the log.")
server = Server(host=settings.get_config('SERVER', 'host'),
port=settings.get_config('SERVER', 'port'))
server.start()
if __name__ == '__main__':
main()
|
import argparse
import logging.config
from intexration import settings
from intexration.server import Server
# Logger
logging.config.fileConfig(settings.LOGGING_FILE)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-host', help='Change the hostname')
parser.add_argument('-port', help='Change the port')
args = parser.parse_args()
if args.host is not None:
settings.set_config('SERVER', 'host', args.host)
logging.INFO("Host changed to %s", args.host)
if args.port is not None:
settings.set_config('SERVER', 'port', args.port)
logging.INFO("Port changed to %s", args.port)
if not settings.all_files_exist():
raise RuntimeError("Some necessary files were missing. Please consult the log.")
server = Server(host=settings.get_config('SERVER', 'host'),
port=settings.get_config('SERVER', 'port'))
server.start()
if __name__ == '__main__':
main()
|
Set config contained a bug after refactoring
|
Set config contained a bug after refactoring
|
Python
|
apache-2.0
|
JDevlieghere/InTeXration,JDevlieghere/InTeXration
|
import argparse
import logging.config
import os
from intexration import settings
from intexration.server import Server
# Logger
logging.config.fileConfig(settings.LOGGING_FILE)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-host', help='Change the hostname')
parser.add_argument('-port', help='Change the port')
args = parser.parse_args()
if args.host is not None:
settings.set_config('host', args.host)
logging.INFO("Host changed to %s", args.host)
if args.port is not None:
settings.set_config('port', args.port)
logging.INFO("Port changed to %s", args.port)
if not settings.all_files_exist():
raise RuntimeError("Some necessary files were missing. Please consult the log.")
server = Server(host=settings.get_config('SERVER', 'host'),
port=settings.get_config('SERVER', 'port'))
server.start()
if __name__ == '__main__':
main()
Set config contained a bug after refactoring
|
import argparse
import logging.config
from intexration import settings
from intexration.server import Server
# Logger
logging.config.fileConfig(settings.LOGGING_FILE)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-host', help='Change the hostname')
parser.add_argument('-port', help='Change the port')
args = parser.parse_args()
if args.host is not None:
settings.set_config('SERVER', 'host', args.host)
logging.INFO("Host changed to %s", args.host)
if args.port is not None:
settings.set_config('SERVER', 'port', args.port)
logging.INFO("Port changed to %s", args.port)
if not settings.all_files_exist():
raise RuntimeError("Some necessary files were missing. Please consult the log.")
server = Server(host=settings.get_config('SERVER', 'host'),
port=settings.get_config('SERVER', 'port'))
server.start()
if __name__ == '__main__':
main()
|
<commit_before>import argparse
import logging.config
import os
from intexration import settings
from intexration.server import Server
# Logger
logging.config.fileConfig(settings.LOGGING_FILE)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-host', help='Change the hostname')
parser.add_argument('-port', help='Change the port')
args = parser.parse_args()
if args.host is not None:
settings.set_config('host', args.host)
logging.INFO("Host changed to %s", args.host)
if args.port is not None:
settings.set_config('port', args.port)
logging.INFO("Port changed to %s", args.port)
if not settings.all_files_exist():
raise RuntimeError("Some necessary files were missing. Please consult the log.")
server = Server(host=settings.get_config('SERVER', 'host'),
port=settings.get_config('SERVER', 'port'))
server.start()
if __name__ == '__main__':
main()<commit_msg>Set config contained a bug after refactoring<commit_after>
|
import argparse
import logging.config
from intexration import settings
from intexration.server import Server
# Logger
logging.config.fileConfig(settings.LOGGING_FILE)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-host', help='Change the hostname')
parser.add_argument('-port', help='Change the port')
args = parser.parse_args()
if args.host is not None:
settings.set_config('SERVER', 'host', args.host)
logging.INFO("Host changed to %s", args.host)
if args.port is not None:
settings.set_config('SERVER', 'port', args.port)
logging.INFO("Port changed to %s", args.port)
if not settings.all_files_exist():
raise RuntimeError("Some necessary files were missing. Please consult the log.")
server = Server(host=settings.get_config('SERVER', 'host'),
port=settings.get_config('SERVER', 'port'))
server.start()
if __name__ == '__main__':
main()
|
import argparse
import logging.config
import os
from intexration import settings
from intexration.server import Server
# Logger
logging.config.fileConfig(settings.LOGGING_FILE)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-host', help='Change the hostname')
parser.add_argument('-port', help='Change the port')
args = parser.parse_args()
if args.host is not None:
settings.set_config('host', args.host)
logging.INFO("Host changed to %s", args.host)
if args.port is not None:
settings.set_config('port', args.port)
logging.INFO("Port changed to %s", args.port)
if not settings.all_files_exist():
raise RuntimeError("Some necessary files were missing. Please consult the log.")
server = Server(host=settings.get_config('SERVER', 'host'),
port=settings.get_config('SERVER', 'port'))
server.start()
if __name__ == '__main__':
main()
Set config contained a bug after refactoring
import argparse
import logging.config
from intexration import settings
from intexration.server import Server
# Logger
logging.config.fileConfig(settings.LOGGING_FILE)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-host', help='Change the hostname')
parser.add_argument('-port', help='Change the port')
args = parser.parse_args()
if args.host is not None:
settings.set_config('SERVER', 'host', args.host)
logging.INFO("Host changed to %s", args.host)
if args.port is not None:
settings.set_config('SERVER', 'port', args.port)
logging.INFO("Port changed to %s", args.port)
if not settings.all_files_exist():
raise RuntimeError("Some necessary files were missing. Please consult the log.")
server = Server(host=settings.get_config('SERVER', 'host'),
port=settings.get_config('SERVER', 'port'))
server.start()
if __name__ == '__main__':
main()
|
<commit_before>import argparse
import logging.config
import os
from intexration import settings
from intexration.server import Server
# Logger
logging.config.fileConfig(settings.LOGGING_FILE)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-host', help='Change the hostname')
parser.add_argument('-port', help='Change the port')
args = parser.parse_args()
if args.host is not None:
settings.set_config('host', args.host)
logging.INFO("Host changed to %s", args.host)
if args.port is not None:
settings.set_config('port', args.port)
logging.INFO("Port changed to %s", args.port)
if not settings.all_files_exist():
raise RuntimeError("Some necessary files were missing. Please consult the log.")
server = Server(host=settings.get_config('SERVER', 'host'),
port=settings.get_config('SERVER', 'port'))
server.start()
if __name__ == '__main__':
main()<commit_msg>Set config contained a bug after refactoring<commit_after>import argparse
import logging.config
from intexration import settings
from intexration.server import Server
# Logger
logging.config.fileConfig(settings.LOGGING_FILE)
def main():
parser = argparse.ArgumentParser()
parser.add_argument('-host', help='Change the hostname')
parser.add_argument('-port', help='Change the port')
args = parser.parse_args()
if args.host is not None:
settings.set_config('SERVER', 'host', args.host)
logging.INFO("Host changed to %s", args.host)
if args.port is not None:
settings.set_config('SERVER', 'port', args.port)
logging.INFO("Port changed to %s", args.port)
if not settings.all_files_exist():
raise RuntimeError("Some necessary files were missing. Please consult the log.")
server = Server(host=settings.get_config('SERVER', 'host'),
port=settings.get_config('SERVER', 'port'))
server.start()
if __name__ == '__main__':
main()
|
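The fix above restores the section argument ('SERVER') that the refactored set_config call had dropped. The record does not show intexration's settings module, so the helper below is only a guess at the usual configparser-backed shape of such a function. (Separately, the logging.INFO(...) calls in the record would fail at runtime if reached, since logging.INFO is a level constant rather than a function; logging.info is the callable.)

import configparser

CONFIG_FILE = 'intexration.cfg'   # illustrative path, not the project's real one

def set_config(section, option, value):
    # Read the file, update one option inside the named section, write back.
    parser = configparser.ConfigParser()
    parser.read(CONFIG_FILE)
    if not parser.has_section(section):
        parser.add_section(section)
    parser.set(section, option, str(value))
    with open(CONFIG_FILE, 'w') as handle:
        parser.write(handle)

def get_config(section, option):
    parser = configparser.ConfigParser()
    parser.read(CONFIG_FILE)
    return parser.get(section, option)

# set_config('SERVER', 'host', '0.0.0.0')
# get_config('SERVER', 'host')   # -> '0.0.0.0'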
0d8282f31b74b6546f07fa37e88b59ed12e945c8
|
scripts/test_import_optional.py
|
scripts/test_import_optional.py
|
#!/usr/bin/env python3
import os
import subprocess
import sys
neonc = sys.argv[1]
executor = sys.argv[2:]
out = subprocess.check_output([neonc, "t/import-optional-missing.neon"], env={"NEONPATH": "t/compile-time-only"}, stderr=subprocess.STDOUT, universal_newlines=True)
sys.stdout.write(out)
if "not found" in out:
print("{}: Failed: expected compile step to find module2.neon".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
subprocess.check_call(executor + ["t/import-optional-missing.neonx"])
|
#!/usr/bin/env python3
import os
import subprocess
import sys
neonc = sys.argv[1]
executor = sys.argv[2:]
out = subprocess.check_output([neonc, "-o", "tmp/import-optional-missing.neonx", "t/import-optional-missing.neon"], env={"NEONPATH": "t/compile-time-only"}, stderr=subprocess.STDOUT, universal_newlines=True)
sys.stdout.write(out)
if "not found" in out:
print("{}: Failed: expected compile step to find module2.neon".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
subprocess.check_call(executor + ["tmp/import-optional-missing.neonx"])
|
Fix up import optional test to use temporary compiled file
|
Fix up import optional test to use temporary compiled file
|
Python
|
mit
|
ghewgill/neon-lang,ghewgill/neon-lang,ghewgill/neon-lang,ghewgill/neon-lang,ghewgill/neon-lang,ghewgill/neon-lang,ghewgill/neon-lang,ghewgill/neon-lang,ghewgill/neon-lang,ghewgill/neon-lang
|
#!/usr/bin/env python3
import os
import subprocess
import sys
neonc = sys.argv[1]
executor = sys.argv[2:]
out = subprocess.check_output([neonc, "t/import-optional-missing.neon"], env={"NEONPATH": "t/compile-time-only"}, stderr=subprocess.STDOUT, universal_newlines=True)
sys.stdout.write(out)
if "not found" in out:
print("{}: Failed: expected compile step to find module2.neon".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
subprocess.check_call(executor + ["t/import-optional-missing.neonx"])
Fix up import optional test to use temporary compiled file
|
#!/usr/bin/env python3
import os
import subprocess
import sys
neonc = sys.argv[1]
executor = sys.argv[2:]
out = subprocess.check_output([neonc, "-o", "tmp/import-optional-missing.neonx", "t/import-optional-missing.neon"], env={"NEONPATH": "t/compile-time-only"}, stderr=subprocess.STDOUT, universal_newlines=True)
sys.stdout.write(out)
if "not found" in out:
print("{}: Failed: expected compile step to find module2.neon".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
subprocess.check_call(executor + ["tmp/import-optional-missing.neonx"])
|
<commit_before>#!/usr/bin/env python3
import os
import subprocess
import sys
neonc = sys.argv[1]
executor = sys.argv[2:]
out = subprocess.check_output([neonc, "t/import-optional-missing.neon"], env={"NEONPATH": "t/compile-time-only"}, stderr=subprocess.STDOUT, universal_newlines=True)
sys.stdout.write(out)
if "not found" in out:
print("{}: Failed: expected compile step to find module2.neon".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
subprocess.check_call(executor + ["t/import-optional-missing.neonx"])
<commit_msg>Fix up import optional test to use temporary compiled file<commit_after>
|
#!/usr/bin/env python3
import os
import subprocess
import sys
neonc = sys.argv[1]
executor = sys.argv[2:]
out = subprocess.check_output([neonc, "-o", "tmp/import-optional-missing.neonx", "t/import-optional-missing.neon"], env={"NEONPATH": "t/compile-time-only"}, stderr=subprocess.STDOUT, universal_newlines=True)
sys.stdout.write(out)
if "not found" in out:
print("{}: Failed: expected compile step to find module2.neon".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
subprocess.check_call(executor + ["tmp/import-optional-missing.neonx"])
|
#!/usr/bin/env python3
import os
import subprocess
import sys
neonc = sys.argv[1]
executor = sys.argv[2:]
out = subprocess.check_output([neonc, "t/import-optional-missing.neon"], env={"NEONPATH": "t/compile-time-only"}, stderr=subprocess.STDOUT, universal_newlines=True)
sys.stdout.write(out)
if "not found" in out:
print("{}: Failed: expected compile step to find module2.neon".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
subprocess.check_call(executor + ["t/import-optional-missing.neonx"])
Fix up import optional test to use temporary compiled file
#!/usr/bin/env python3
import os
import subprocess
import sys
neonc = sys.argv[1]
executor = sys.argv[2:]
out = subprocess.check_output([neonc, "-o", "tmp/import-optional-missing.neonx", "t/import-optional-missing.neon"], env={"NEONPATH": "t/compile-time-only"}, stderr=subprocess.STDOUT, universal_newlines=True)
sys.stdout.write(out)
if "not found" in out:
print("{}: Failed: expected compile step to find module2.neon".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
subprocess.check_call(executor + ["tmp/import-optional-missing.neonx"])
|
<commit_before>#!/usr/bin/env python3
import os
import subprocess
import sys
neonc = sys.argv[1]
executor = sys.argv[2:]
out = subprocess.check_output([neonc, "t/import-optional-missing.neon"], env={"NEONPATH": "t/compile-time-only"}, stderr=subprocess.STDOUT, universal_newlines=True)
sys.stdout.write(out)
if "not found" in out:
print("{}: Failed: expected compile step to find module2.neon".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
subprocess.check_call(executor + ["t/import-optional-missing.neonx"])
<commit_msg>Fix up import optional test to use temporary compiled file<commit_after>#!/usr/bin/env python3
import os
import subprocess
import sys
neonc = sys.argv[1]
executor = sys.argv[2:]
out = subprocess.check_output([neonc, "-o", "tmp/import-optional-missing.neonx", "t/import-optional-missing.neon"], env={"NEONPATH": "t/compile-time-only"}, stderr=subprocess.STDOUT, universal_newlines=True)
sys.stdout.write(out)
if "not found" in out:
print("{}: Failed: expected compile step to find module2.neon".format(sys.argv[0]), file=sys.stderr)
sys.exit(1)
subprocess.check_call(executor + ["tmp/import-optional-missing.neonx"])
|
e83ba5e0eef34e7e5acaad95a490d2e078325842
|
pip_accel/logger.py
|
pip_accel/logger.py
|
# Logging for the pip accelerator.
#
# Author: Peter Odding <peter.odding@paylogic.eu>
# Last Change: July 21, 2013
# URL: https://github.com/paylogic/pip-accel
"""
Logging for the pip accelerator.
"""
# Standard library modules.
import logging
import os
import sys
# External dependency.
import coloredlogs
coloredlogs.install()
# Initialize the logging subsystem.
logger = logging.getLogger('pip-accel')
logger.setLevel(logging.DEBUG)
# Check if the operator requested verbose output.
if '-v' in sys.argv or 'PIP_ACCEL_VERBOSE' in os.environ:
logging.getLogger().setLevel(logging.DEBUG)
|
# Logging for the pip accelerator.
#
# Author: Peter Odding <peter.odding@paylogic.eu>
# Last Change: July 21, 2013
# URL: https://github.com/paylogic/pip-accel
"""
Logging for the pip accelerator.
"""
# Standard library modules.
import logging
import os
import sys
# External dependency.
import coloredlogs
coloredlogs.install()
# Initialize the logging subsystem.
logger = logging.getLogger('pip-accel')
logger.setLevel(logging.INFO)
# Check if the operator requested verbose output.
if '-v' in sys.argv or 'PIP_ACCEL_VERBOSE' in os.environ:
logging.getLogger().setLevel(logging.DEBUG)
|
Set logging level to INFO (changed to DEBUG in 47c50c0)
|
Set logging level to INFO (changed to DEBUG in 47c50c0)
IMHO, it's a UX issue.
|
Python
|
mit
|
paylogic/pip-accel,theyoprst/pip-accel,pombredanne/pip-accel,matysek/pip-accel,matysek/pip-accel,theyoprst/pip-accel,paylogic/pip-accel,pombredanne/pip-accel
|
# Logging for the pip accelerator.
#
# Author: Peter Odding <peter.odding@paylogic.eu>
# Last Change: July 21, 2013
# URL: https://github.com/paylogic/pip-accel
"""
Logging for the pip accelerator.
"""
# Standard library modules.
import logging
import os
import sys
# External dependency.
import coloredlogs
coloredlogs.install()
# Initialize the logging subsystem.
logger = logging.getLogger('pip-accel')
logger.setLevel(logging.DEBUG)
# Check if the operator requested verbose output.
if '-v' in sys.argv or 'PIP_ACCEL_VERBOSE' in os.environ:
logging.getLogger().setLevel(logging.DEBUG)
Set logging level to INFO (changed to DEBUG in 47c50c0)
IMHO, it's a UX issue.
|
# Logging for the pip accelerator.
#
# Author: Peter Odding <peter.odding@paylogic.eu>
# Last Change: July 21, 2013
# URL: https://github.com/paylogic/pip-accel
"""
Logging for the pip accelerator.
"""
# Standard library modules.
import logging
import os
import sys
# External dependency.
import coloredlogs
coloredlogs.install()
# Initialize the logging subsystem.
logger = logging.getLogger('pip-accel')
logger.setLevel(logging.INFO)
# Check if the operator requested verbose output.
if '-v' in sys.argv or 'PIP_ACCEL_VERBOSE' in os.environ:
logging.getLogger().setLevel(logging.DEBUG)
|
<commit_before># Logging for the pip accelerator.
#
# Author: Peter Odding <peter.odding@paylogic.eu>
# Last Change: July 21, 2013
# URL: https://github.com/paylogic/pip-accel
"""
Logging for the pip accelerator.
"""
# Standard library modules.
import logging
import os
import sys
# External dependency.
import coloredlogs
coloredlogs.install()
# Initialize the logging subsystem.
logger = logging.getLogger('pip-accel')
logger.setLevel(logging.DEBUG)
# Check if the operator requested verbose output.
if '-v' in sys.argv or 'PIP_ACCEL_VERBOSE' in os.environ:
logging.getLogger().setLevel(logging.DEBUG)
<commit_msg>Set logging level to INFO (changed to DEBUG in 47c50c0)
IMHO, it's a UX issue.<commit_after>
|
# Logging for the pip accelerator.
#
# Author: Peter Odding <peter.odding@paylogic.eu>
# Last Change: July 21, 2013
# URL: https://github.com/paylogic/pip-accel
"""
Logging for the pip accelerator.
"""
# Standard library modules.
import logging
import os
import sys
# External dependency.
import coloredlogs
coloredlogs.install()
# Initialize the logging subsystem.
logger = logging.getLogger('pip-accel')
logger.setLevel(logging.INFO)
# Check if the operator requested verbose output.
if '-v' in sys.argv or 'PIP_ACCEL_VERBOSE' in os.environ:
logging.getLogger().setLevel(logging.DEBUG)
|
# Logging for the pip accelerator.
#
# Author: Peter Odding <peter.odding@paylogic.eu>
# Last Change: July 21, 2013
# URL: https://github.com/paylogic/pip-accel
"""
Logging for the pip accelerator.
"""
# Standard library modules.
import logging
import os
import sys
# External dependency.
import coloredlogs
coloredlogs.install()
# Initialize the logging subsystem.
logger = logging.getLogger('pip-accel')
logger.setLevel(logging.DEBUG)
# Check if the operator requested verbose output.
if '-v' in sys.argv or 'PIP_ACCEL_VERBOSE' in os.environ:
logging.getLogger().setLevel(logging.DEBUG)
Set logging level to INFO (changed to DEBUG in 47c50c0)
IMHO, it's a UX issue.
# Logging for the pip accelerator.
#
# Author: Peter Odding <peter.odding@paylogic.eu>
# Last Change: July 21, 2013
# URL: https://github.com/paylogic/pip-accel
"""
Logging for the pip accelerator.
"""
# Standard library modules.
import logging
import os
import sys
# External dependency.
import coloredlogs
coloredlogs.install()
# Initialize the logging subsystem.
logger = logging.getLogger('pip-accel')
logger.setLevel(logging.INFO)
# Check if the operator requested verbose output.
if '-v' in sys.argv or 'PIP_ACCEL_VERBOSE' in os.environ:
logging.getLogger().setLevel(logging.DEBUG)
|
<commit_before># Logging for the pip accelerator.
#
# Author: Peter Odding <peter.odding@paylogic.eu>
# Last Change: July 21, 2013
# URL: https://github.com/paylogic/pip-accel
"""
Logging for the pip accelerator.
"""
# Standard library modules.
import logging
import os
import sys
# External dependency.
import coloredlogs
coloredlogs.install()
# Initialize the logging subsystem.
logger = logging.getLogger('pip-accel')
logger.setLevel(logging.DEBUG)
# Check if the operator requested verbose output.
if '-v' in sys.argv or 'PIP_ACCEL_VERBOSE' in os.environ:
logging.getLogger().setLevel(logging.DEBUG)
<commit_msg>Set logging level to INFO (changed to DEBUG in 47c50c0)
IMHO, it's a UX issue.<commit_after># Logging for the pip accelerator.
#
# Author: Peter Odding <peter.odding@paylogic.eu>
# Last Change: July 21, 2013
# URL: https://github.com/paylogic/pip-accel
"""
Logging for the pip accelerator.
"""
# Standard library modules.
import logging
import os
import sys
# External dependency.
import coloredlogs
coloredlogs.install()
# Initialize the logging subsystem.
logger = logging.getLogger('pip-accel')
logger.setLevel(logging.INFO)
# Check if the operator requested verbose output.
if '-v' in sys.argv or 'PIP_ACCEL_VERBOSE' in os.environ:
logging.getLogger().setLevel(logging.DEBUG)
|
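The change above pins the named 'pip-accel' logger to INFO and only raises the root logger to DEBUG when -v or PIP_ACCEL_VERBOSE is present. A short self-contained sketch of how a named logger's own level interacts with the root level; the logger names here are illustrative.

import logging

logging.basicConfig(level=logging.WARNING)
app_logger = logging.getLogger('pip-accel')
app_logger.setLevel(logging.INFO)

app_logger.debug('cache details')    # filtered: below the logger's own INFO level
app_logger.info('building wheel')    # emitted

logging.getLogger().setLevel(logging.DEBUG)
app_logger.debug('still filtered')   # the named logger keeps its own INFO threshold
logging.getLogger('some.other.module').debug('emitted: inherits the root DEBUG level')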
c9587decdb67474959e1957378f6a4987e4c320a
|
apps/welcome/urls.py
|
apps/welcome/urls.py
|
from django.conf.urls.defaults import *
# Smrtr
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^1$', 'welcome.views.profile', name='welcome-1' ),
url(r'^2$', 'network.views.search', {'next': 'home'}, name='welcome-2' )
#url(r'^3$', 'welcome.views.profile', name='welcome-1' ),
)
|
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
# Smrtr
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^1$', 'welcome.views.profile', name='welcome-1' ),
url(r'^2$', 'network.views.search', {'next': '3'}, name='welcome-2' ),
url(r'^3$', 'education.views.module_search', {'next': '/'}, name='welcome-3' )
)
|
Add select modules to the welcome steps. Redirect fixes to skip
|
Add select modules to the welcome steps. Redirect fixes to skip
|
Python
|
bsd-3-clause
|
mfitzp/smrtr,mfitzp/smrtr
|
from django.conf.urls.defaults import *
# Smrtr
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^1$', 'welcome.views.profile', name='welcome-1' ),
url(r'^2$', 'network.views.search', {'next': 'home'}, name='welcome-2' )
#url(r'^3$', 'welcome.views.profile', name='welcome-1' ),
)
Add select modules to the welcome steps. Redirect fixes to skip
|
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
# Smrtr
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^1$', 'welcome.views.profile', name='welcome-1' ),
url(r'^2$', 'network.views.search', {'next': '3'}, name='welcome-2' ),
url(r'^3$', 'education.views.module_search', {'next': '/'}, name='welcome-3' )
)
|
<commit_before>from django.conf.urls.defaults import *
# Smrtr
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^1$', 'welcome.views.profile', name='welcome-1' ),
url(r'^2$', 'network.views.search', {'next': 'home'}, name='welcome-2' )
#url(r'^3$', 'welcome.views.profile', name='welcome-1' ),
)
<commit_msg>Add select modules to the welcome steps. Redirect fixes to skip<commit_after>
|
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
# Smrtr
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^1$', 'welcome.views.profile', name='welcome-1' ),
url(r'^2$', 'network.views.search', {'next': '3'}, name='welcome-2' ),
url(r'^3$', 'education.views.module_search', {'next': '/'}, name='welcome-3' )
)
|
from django.conf.urls.defaults import *
# Smrtr
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^1$', 'welcome.views.profile', name='welcome-1' ),
url(r'^2$', 'network.views.search', {'next': 'home'}, name='welcome-2' )
#url(r'^3$', 'welcome.views.profile', name='welcome-1' ),
)
Add select modules to the welcome steps. Redirect fixes to skip
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
# Smrtr
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^1$', 'welcome.views.profile', name='welcome-1' ),
url(r'^2$', 'network.views.search', {'next': '3'}, name='welcome-2' ),
url(r'^3$', 'education.views.module_search', {'next': '/'}, name='welcome-3' )
)
|
<commit_before>from django.conf.urls.defaults import *
# Smrtr
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^1$', 'welcome.views.profile', name='welcome-1' ),
url(r'^2$', 'network.views.search', {'next': 'home'}, name='welcome-2' )
#url(r'^3$', 'welcome.views.profile', name='welcome-1' ),
)
<commit_msg>Add select modules to the welcome steps. Redirect fixes to skip<commit_after>from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
# Smrtr
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^1$', 'welcome.views.profile', name='welcome-1' ),
url(r'^2$', 'network.views.search', {'next': '3'}, name='welcome-2' ),
url(r'^3$', 'education.views.module_search', {'next': '/'}, name='welcome-3' )
)
|
513d8e83dc7aea052682df2bc93cd146b6799406
|
client/examples/cycle-cards.py
|
client/examples/cycle-cards.py
|
#!/bin/python
import removinator
import subprocess
# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.
print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()
print('--- Cycling through cards ---')
for card in range(1, 9):
try:
ctl.insert_card(card)
print('Inserted card {0}'.format(card))
print('{0}'.format(subprocess.check_output(['pkcs15-tool',
'--list-certificates'])
.rstrip()))
except removinator.SlotError:
print('Card {0} is not inserted'.format(card))
print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
print('Card {0} is present'.format(card))
print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)
print('--- Remove current card ---')
ctl.remove_card()
|
#!/usr/bin/env python
import removinator
import subprocess
# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.
print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()
print('--- Cycling through cards ---')
for card in range(1, 9):
try:
ctl.insert_card(card)
print('Inserted card {0}'.format(card))
print('{0}'.format(subprocess.check_output(['pkcs15-tool',
'--list-certificates'])
.rstrip()))
except removinator.SlotError:
print('Card {0} is not inserted'.format(card))
print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
print('Card {0} is present'.format(card))
print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)
print('--- Remove current card ---')
ctl.remove_card()
|
Use python from env in example script
|
Use python from env in example script
This makes the cycle-cards example script use python from the env
instead of hardcoding the location. This allows a virtualenv to be
easily used.
|
Python
|
apache-2.0
|
nkinder/smart-card-removinator
|
#!/bin/python
import removinator
import subprocess
# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.
print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()
print('--- Cycling through cards ---')
for card in range(1, 9):
try:
ctl.insert_card(card)
print('Inserted card {0}'.format(card))
print('{0}'.format(subprocess.check_output(['pkcs15-tool',
'--list-certificates'])
.rstrip()))
except removinator.SlotError:
print('Card {0} is not inserted'.format(card))
print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
print('Card {0} is present'.format(card))
print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)
print('--- Remove current card ---')
ctl.remove_card()
Use python from env in example script
This makes the cycle-cards example script use python from the env
instead of hardcoding the location. This allows a virtualenv to be
easily used.
|
#!/usr/bin/env python
import removinator
import subprocess
# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.
print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()
print('--- Cycling through cards ---')
for card in range(1, 9):
try:
ctl.insert_card(card)
print('Inserted card {0}'.format(card))
print('{0}'.format(subprocess.check_output(['pkcs15-tool',
'--list-certificates'])
.rstrip()))
except removinator.SlotError:
print('Card {0} is not inserted'.format(card))
print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
print('Card {0} is present'.format(card))
print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)
print('--- Remove current card ---')
ctl.remove_card()
|
<commit_before>#!/bin/python
import removinator
import subprocess
# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.
print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()
print('--- Cycling through cards ---')
for card in range(1, 9):
try:
ctl.insert_card(card)
print('Inserted card {0}'.format(card))
print('{0}'.format(subprocess.check_output(['pkcs15-tool',
'--list-certificates'])
.rstrip()))
except removinator.SlotError:
print('Card {0} is not inserted'.format(card))
print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
print('Card {0} is present'.format(card))
print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)
print('--- Remove current card ---')
ctl.remove_card()
<commit_msg>Use python from env in example script
This makes the cycle-cards example script use python from the env
instead of hardcoding the location. This allows a virtualenv to be
easily used.<commit_after>
|
#!/usr/bin/env python
import removinator
import subprocess
# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.
print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()
print('--- Cycling through cards ---')
for card in range(1, 9):
try:
ctl.insert_card(card)
print('Inserted card {0}'.format(card))
print('{0}'.format(subprocess.check_output(['pkcs15-tool',
'--list-certificates'])
.rstrip()))
except removinator.SlotError:
print('Card {0} is not inserted'.format(card))
print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
print('Card {0} is present'.format(card))
print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)
print('--- Remove current card ---')
ctl.remove_card()
|
#!/bin/python
import removinator
import subprocess
# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.
print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()
print('--- Cycling through cards ---')
for card in range(1, 9):
try:
ctl.insert_card(card)
print('Inserted card {0}'.format(card))
print('{0}'.format(subprocess.check_output(['pkcs15-tool',
'--list-certificates'])
.rstrip()))
except removinator.SlotError:
print('Card {0} is not inserted'.format(card))
print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
print('Card {0} is present'.format(card))
print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)
print('--- Remove current card ---')
ctl.remove_card()
Use python from env in example script
This makes the cycle-cards example script use python from the env
instead of hardcoding the location. This allows a virtualenv to be
easily used.#!/usr/bin/env python
import removinator
import subprocess
# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.
print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()
print('--- Cycling through cards ---')
for card in range(1, 9):
try:
ctl.insert_card(card)
print('Inserted card {0}'.format(card))
print('{0}'.format(subprocess.check_output(['pkcs15-tool',
'--list-certificates'])
.rstrip()))
except removinator.SlotError:
print('Card {0} is not inserted'.format(card))
print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
print('Card {0} is present'.format(card))
print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)
print('--- Remove current card ---')
ctl.remove_card()
|
<commit_before>#!/bin/python
import removinator
import subprocess
# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.
print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()
print('--- Cycling through cards ---')
for card in range(1, 9):
try:
ctl.insert_card(card)
print('Inserted card {0}'.format(card))
print('{0}'.format(subprocess.check_output(['pkcs15-tool',
'--list-certificates'])
.rstrip()))
except removinator.SlotError:
print('Card {0} is not inserted'.format(card))
print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
print('Card {0} is present'.format(card))
print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)
print('--- Remove current card ---')
ctl.remove_card()
<commit_msg>Use python from env in example script
This makes the cycle-cards example script use python from the env
instead of hardcoding the location. This allows a virtualenv to be
easily used.<commit_after>#!/usr/bin/env python
import removinator
import subprocess
# This example cycles through each card slot in the Removinator. Any
# slots that have a card present will then have the certificates on the
# card printed out using the pkcs15-tool utility, which is provided by
# the OpenSC project.
#
# Examples of parsing the Removinator status output and enabling debug
# output from the firmware are also provided.
print('--- Connecting to Removinator ---')
ctl = removinator.Removinator()
print('--- Cycling through cards ---')
for card in range(1, 9):
try:
ctl.insert_card(card)
print('Inserted card {0}'.format(card))
print('{0}'.format(subprocess.check_output(['pkcs15-tool',
'--list-certificates'])
.rstrip()))
except removinator.SlotError:
print('Card {0} is not inserted'.format(card))
print('--- Checking Removinator status ---')
status = ctl.get_status()
print('Current card: {0}'.format(status['current']))
for card in status['present']:
print('Card {0} is present'.format(card))
print('--- Debug output for re-insertion of current card ---')
ctl.set_debug(True)
ctl.insert_card(status['current'])
print('{0}'.format(ctl.last_response.rstrip()))
ctl.set_debug(False)
print('--- Remove current card ---')
ctl.remove_card()
|
725bfcc3484826083c3e6cdca71b4af41b37a9c9
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'fluent_contents',
'fluent_contents.tests.testapp',
),
ROOT_URLCONF = 'fluent_contents.tests.testapp.urls',
FLUENT_CONTENTS_CACHE_OUTPUT = True,
SITE_ID = 3
)
def runtests():
argv = sys.argv[:1] + ['test', 'fluent_contents', '--traceback'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'fluent_contents',
'fluent_contents.tests.testapp',
),
ROOT_URLCONF = 'fluent_contents.tests.testapp.urls',
TEST_RUNNER='django.test.simple.DjangoTestSuiteRunner', # for Django 1.6, see https://docs.djangoproject.com/en/dev/releases/1.6/#new-test-runner
SITE_ID = 3,
FLUENT_CONTENTS_CACHE_OUTPUT = True,
)
def runtests():
argv = sys.argv[:1] + ['test', 'fluent_contents', '--traceback'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
Make sure tests are found in Django 1.6
|
Make sure tests are found in Django 1.6
|
Python
|
apache-2.0
|
jpotterm/django-fluent-contents,ixc/django-fluent-contents,edoburu/django-fluent-contents,jpotterm/django-fluent-contents,ixc/django-fluent-contents,django-fluent/django-fluent-contents,jpotterm/django-fluent-contents,ixc/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'fluent_contents',
'fluent_contents.tests.testapp',
),
ROOT_URLCONF = 'fluent_contents.tests.testapp.urls',
FLUENT_CONTENTS_CACHE_OUTPUT = True,
SITE_ID = 3
)
def runtests():
argv = sys.argv[:1] + ['test', 'fluent_contents', '--traceback'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
Make sure tests are found in Django 1.6
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'fluent_contents',
'fluent_contents.tests.testapp',
),
ROOT_URLCONF = 'fluent_contents.tests.testapp.urls',
TEST_RUNNER='django.test.simple.DjangoTestSuiteRunner', # for Django 1.6, see https://docs.djangoproject.com/en/dev/releases/1.6/#new-test-runner
SITE_ID = 3,
FLUENT_CONTENTS_CACHE_OUTPUT = True,
)
def runtests():
argv = sys.argv[:1] + ['test', 'fluent_contents', '--traceback'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
<commit_before>#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'fluent_contents',
'fluent_contents.tests.testapp',
),
ROOT_URLCONF = 'fluent_contents.tests.testapp.urls',
FLUENT_CONTENTS_CACHE_OUTPUT = True,
SITE_ID = 3
)
def runtests():
argv = sys.argv[:1] + ['test', 'fluent_contents', '--traceback'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
<commit_msg>Make sure tests are found in Django 1.6<commit_after>
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'fluent_contents',
'fluent_contents.tests.testapp',
),
ROOT_URLCONF = 'fluent_contents.tests.testapp.urls',
TEST_RUNNER='django.test.simple.DjangoTestSuiteRunner', # for Django 1.6, see https://docs.djangoproject.com/en/dev/releases/1.6/#new-test-runner
SITE_ID = 3,
FLUENT_CONTENTS_CACHE_OUTPUT = True,
)
def runtests():
argv = sys.argv[:1] + ['test', 'fluent_contents', '--traceback'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'fluent_contents',
'fluent_contents.tests.testapp',
),
ROOT_URLCONF = 'fluent_contents.tests.testapp.urls',
FLUENT_CONTENTS_CACHE_OUTPUT = True,
SITE_ID = 3
)
def runtests():
argv = sys.argv[:1] + ['test', 'fluent_contents', '--traceback'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
Make sure tests are found in Django 1.6#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'fluent_contents',
'fluent_contents.tests.testapp',
),
ROOT_URLCONF = 'fluent_contents.tests.testapp.urls',
TEST_RUNNER='django.test.simple.DjangoTestSuiteRunner', # for Django 1.6, see https://docs.djangoproject.com/en/dev/releases/1.6/#new-test-runner
SITE_ID = 3,
FLUENT_CONTENTS_CACHE_OUTPUT = True,
)
def runtests():
argv = sys.argv[:1] + ['test', 'fluent_contents', '--traceback'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
<commit_before>#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'fluent_contents',
'fluent_contents.tests.testapp',
),
ROOT_URLCONF = 'fluent_contents.tests.testapp.urls',
FLUENT_CONTENTS_CACHE_OUTPUT = True,
SITE_ID = 3
)
def runtests():
argv = sys.argv[:1] + ['test', 'fluent_contents', '--traceback'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
<commit_msg>Make sure tests are found in Django 1.6<commit_after>#!/usr/bin/env python
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.messages',
'django.contrib.sites',
'django.contrib.admin',
'fluent_contents',
'fluent_contents.tests.testapp',
),
ROOT_URLCONF = 'fluent_contents.tests.testapp.urls',
TEST_RUNNER='django.test.simple.DjangoTestSuiteRunner', # for Django 1.6, see https://docs.djangoproject.com/en/dev/releases/1.6/#new-test-runner
SITE_ID = 3,
FLUENT_CONTENTS_CACHE_OUTPUT = True,
)
def runtests():
argv = sys.argv[:1] + ['test', 'fluent_contents', '--traceback'] + sys.argv[1:]
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
|
8fcf73183f895b6dc1dc7ebff847cbb2465c2b93
|
main.py
|
main.py
|
#!/usr/bin/env python3
|
#!/usr/bin/env python3
import argparse
import discord
import asyncio
class commands():
async def test(client, message, args):
await client.send_message(message.channel, "Tested!")
async def testedit(client, message, args):
sleep_time = 5
if len(args) > 0:
try: sleep_time = int(args[0])
except: pass
mid = await client.send_message(message.channel, "Editing this message in {} seconds...".format(sleep_time))
await asyncio.sleep(sleep_time)
await client.edit_message(mid, "Edited!")
class TransportLayerBot(discord.Client):
async def on_ready(self):
print("Logged in as {}, ID {}.".format(self.user.name, self.user.id))
async def on_message(self, message):
if not message.author == self.user.id:
if message.content[0] == '!':
command, *args = message.content[1:].split()
try:
clientCommand = getattr(commands, command)
try:
await clientCommand(self, message, args)
except Exception as e:
await self.send_message(message.channel, "Something broke:\n```{}```".format(e))
except AttributeError:
pass # Not a command.
def main():
parser = argparse.ArgumentParser(description="TransportLayerBot for Discord")
parser.add_argument("-t", "--token", type=str, metavar="TOKEN", dest="TOKEN", help="bot user application token", action="store", required=True)
SETTINGS = vars(parser.parse_args())
try:
print("Starting TransportLayerBot with Discord version {}...".format(discord.__version__))
client = TransportLayerBot()
client.run(SETTINGS["TOKEN"])
finally:
print("Stopping...")
client.logout()
if __name__ == "__main__":
main()
|
Add chat command framework and basic test commands.
|
Add chat command framework and basic test commands.
|
Python
|
agpl-3.0
|
TransportLayer/TransportLayerBot-Discord
|
#!/usr/bin/env python3
Add chat command framework and basic test commands.
|
#!/usr/bin/env python3
import argparse
import discord
import asyncio
class commands():
async def test(client, message, args):
await client.send_message(message.channel, "Tested!")
async def testedit(client, message, args):
sleep_time = 5
if len(args) > 0:
try: sleep_time = int(args[0])
except: pass
mid = await client.send_message(message.channel, "Editing this message in {} seconds...".format(sleep_time))
await asyncio.sleep(sleep_time)
await client.edit_message(mid, "Edited!")
class TransportLayerBot(discord.Client):
async def on_ready(self):
print("Logged in as {}, ID {}.".format(self.user.name, self.user.id))
async def on_message(self, message):
if not message.author == self.user.id:
if message.content[0] == '!':
command, *args = message.content[1:].split()
try:
clientCommand = getattr(commands, command)
try:
await clientCommand(self, message, args)
except Exception as e:
await self.send_message(message.channel, "Something broke:\n```{}```".format(e))
except AttributeError:
pass # Not a command.
def main():
parser = argparse.ArgumentParser(description="TransportLayerBot for Discord")
parser.add_argument("-t", "--token", type=str, metavar="TOKEN", dest="TOKEN", help="bot user application token", action="store", required=True)
SETTINGS = vars(parser.parse_args())
try:
print("Starting TransportLayerBot with Discord version {}...".format(discord.__version__))
client = TransportLayerBot()
client.run(SETTINGS["TOKEN"])
finally:
print("Stopping...")
client.logout()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
<commit_msg>Add chat command framework and basic test commands.<commit_after>
|
#!/usr/bin/env python3
import argparse
import discord
import asyncio
class commands():
async def test(client, message, args):
await client.send_message(message.channel, "Tested!")
async def testedit(client, message, args):
sleep_time = 5
if len(args) > 0:
try: sleep_time = int(args[0])
except: pass
mid = await client.send_message(message.channel, "Editing this message in {} seconds...".format(sleep_time))
await asyncio.sleep(sleep_time)
await client.edit_message(mid, "Edited!")
class TransportLayerBot(discord.Client):
async def on_ready(self):
print("Logged in as {}, ID {}.".format(self.user.name, self.user.id))
async def on_message(self, message):
if not message.author == self.user.id:
if message.content[0] == '!':
command, *args = message.content[1:].split()
try:
clientCommand = getattr(commands, command)
try:
await clientCommand(self, message, args)
except Exception as e:
await self.send_message(message.channel, "Something broke:\n```{}```".format(e))
except AttributeError:
pass # Not a command.
def main():
parser = argparse.ArgumentParser(description="TransportLayerBot for Discord")
parser.add_argument("-t", "--token", type=str, metavar="TOKEN", dest="TOKEN", help="bot user application token", action="store", required=True)
SETTINGS = vars(parser.parse_args())
try:
print("Starting TransportLayerBot with Discord version {}...".format(discord.__version__))
client = TransportLayerBot()
client.run(SETTINGS["TOKEN"])
finally:
print("Stopping...")
client.logout()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
Add chat command framework and basic test commands.#!/usr/bin/env python3
import argparse
import discord
import asyncio
class commands():
async def test(client, message, args):
await client.send_message(message.channel, "Tested!")
async def testedit(client, message, args):
sleep_time = 5
if len(args) > 0:
try: sleep_time = int(args[0])
except: pass
mid = await client.send_message(message.channel, "Editing this message in {} seconds...".format(sleep_time))
await asyncio.sleep(sleep_time)
await client.edit_message(mid, "Edited!")
class TransportLayerBot(discord.Client):
async def on_ready(self):
print("Logged in as {}, ID {}.".format(self.user.name, self.user.id))
async def on_message(self, message):
if not message.author == self.user.id:
if message.content[0] == '!':
command, *args = message.content[1:].split()
try:
clientCommand = getattr(commands, command)
try:
await clientCommand(self, message, args)
except Exception as e:
await self.send_message(message.channel, "Something broke:\n```{}```".format(e))
except AttributeError:
pass # Not a command.
def main():
parser = argparse.ArgumentParser(description="TransportLayerBot for Discord")
parser.add_argument("-t", "--token", type=str, metavar="TOKEN", dest="TOKEN", help="bot user application token", action="store", required=True)
SETTINGS = vars(parser.parse_args())
try:
print("Starting TransportLayerBot with Discord version {}...".format(discord.__version__))
client = TransportLayerBot()
client.run(SETTINGS["TOKEN"])
finally:
print("Stopping...")
client.logout()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python3
<commit_msg>Add chat command framework and basic test commands.<commit_after>#!/usr/bin/env python3
import argparse
import discord
import asyncio
class commands():
async def test(client, message, args):
await client.send_message(message.channel, "Tested!")
async def testedit(client, message, args):
sleep_time = 5
if len(args) > 0:
try: sleep_time = int(args[0])
except: pass
mid = await client.send_message(message.channel, "Editing this message in {} seconds...".format(sleep_time))
await asyncio.sleep(sleep_time)
await client.edit_message(mid, "Edited!")
class TransportLayerBot(discord.Client):
async def on_ready(self):
print("Logged in as {}, ID {}.".format(self.user.name, self.user.id))
async def on_message(self, message):
if not message.author == self.user.id:
if message.content[0] == '!':
command, *args = message.content[1:].split()
try:
clientCommand = getattr(commands, command)
try:
await clientCommand(self, message, args)
except Exception as e:
await self.send_message(message.channel, "Something broke:\n```{}```".format(e))
except AttributeError:
pass # Not a command.
def main():
parser = argparse.ArgumentParser(description="TransportLayerBot for Discord")
parser.add_argument("-t", "--token", type=str, metavar="TOKEN", dest="TOKEN", help="bot user application token", action="store", required=True)
SETTINGS = vars(parser.parse_args())
try:
print("Starting TransportLayerBot with Discord version {}...".format(discord.__version__))
client = TransportLayerBot()
client.run(SETTINGS["TOKEN"])
finally:
print("Stopping...")
client.logout()
if __name__ == "__main__":
main()
|
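The command framework recorded above dispatches any "!" message by looking up a coroutine of the same name on the commands class with getattr, so a new chat command is just another async method with the (client, message, args) signature. A minimal sketch of such an addition (the echo command below is hypothetical, not part of the recorded commit, and reuses the old-style discord.Client.send_message call the bot already depends on):

class commands():
    # existing test/testedit commands stay as-is ...

    async def echo(client, message, args):
        # Hypothetical example command: repeat whatever follows "!echo",
        # or show a usage hint when no arguments were given.
        reply = ' '.join(args) if args else "Usage: !echo <text>"
        await client.send_message(message.channel, reply)

Because the dispatcher splits the incoming message on whitespace, args already arrives as a list of words, so simple commands like this need no extra parsing.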
34211cad2b2601027af46bee63129fc39890f8d4
|
main.py
|
main.py
|
# -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'], when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
|
# -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
|
Replace a redundant reference to app.config
|
Replace a redundant reference to app.config
|
Python
|
mit
|
piotr-rusin/url-shortener,piotr-rusin/url-shortener
|
# -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'], when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
Replace a redundant reference to app.config
|
# -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
|
<commit_before># -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'], when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
<commit_msg>Replace a redundant reference to app.config<commit_after>
|
# -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
|
# -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'], when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
Replace a redundant reference to app.config# -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
|
<commit_before># -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'], when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
<commit_msg>Replace a redundant reference to app.config<commit_after># -*- coding: utf-8 -*-
"""
url-shortener
==============
An application for generating and storing shorter aliases for
requested URLs. Uses `spam-lists`__ to prevent generating a short URL
for an address recognized as spam, or to warn a user a pre-existing
short alias has a target that has been later recognized as spam.
.. __: https://github.com/piotr-rusin/spam-lists
"""
from url_shortener import app, event_handlers, views
__title__ = 'url-shortener'
__version__ = '0.9.0.dev1'
__author__ = 'Piotr Rusin'
__email__ = "piotr.rusin88@gmail.com"
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Piotr Rusin'
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.run()
|
c7b57235b669c3fac99bc1380d667fdd71e8ca3c
|
main.py
|
main.py
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
while True:
print(slackClient.rtm_read())
time.sleep(5)
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
slackClient.api_call("channels.join", name="#electrical")
while True:
for message in slackClient.rtm_read():
print(message)
if message["type"] == "team_join":
username = message["user"]["name"]
print(username)
message = "Welcome to the New Ro-Bots Slack, @{}! Please make sure to download this on your phone so we can get your attention! The app is available on both iOS and Android.".format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=message, username="The New Ro-Bot", link_names = True)
time.sleep(5)
|
Add welcome message pinging using real time api
|
Add welcome message pinging using real time api
|
Python
|
mit
|
ollien/Slack-Welcome-Bot
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
while True:
print(slackClient.rtm_read())
time.sleep(5)
Add welcome message pinging using real time api
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
slackClient.api_call("channels.join", name="#electrical")
while True:
for message in slackClient.rtm_read():
print(message)
if message["type"] == "team_join":
username = message["user"]["name"]
print(username)
message = "Welcome to the New Ro-Bots Slack, @{}! Please make sure to download this on your phone so we can get your attention! The app is available on both iOS and Android.".format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=message, username="The New Ro-Bot", link_names = True)
time.sleep(5)
|
<commit_before>import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
while True:
print(slackClient.rtm_read())
time.sleep(5)
<commit_msg>Add welcome message pinging using real time api<commit_after>
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
slackClient.api_call("channels.join", name="#electrical")
while True:
for message in slackClient.rtm_read():
print(message)
if message["type"] == "team_join":
username = message["user"]["name"]
print(username)
message = "Welcome to the New Ro-Bots Slack, @{}! Please make sure to download this on your phone so we can get your attention! The app is available on both iOS and Android.".format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=message, username="The New Ro-Bot", link_names = True)
time.sleep(5)
|
import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
while True:
print(slackClient.rtm_read())
time.sleep(5)
Add welcome message pinging using real time apiimport slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
slackClient.api_call("channels.join", name="#electrical")
while True:
for message in slackClient.rtm_read():
print(message)
if message["type"] == "team_join":
username = message["user"]["name"]
print(username)
message = "Welcome to the New Ro-Bots Slack, @{}! Please make sure to download this on your phone so we can get your attention! The app is available on both iOS and Android.".format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=message, username="The New Ro-Bot", link_names = True)
time.sleep(5)
|
<commit_before>import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
while True:
print(slackClient.rtm_read())
time.sleep(5)
<commit_msg>Add welcome message pinging using real time api<commit_after>import slackclient
import time
import os
slackClient = slackclient.SlackClient(os.environ["SLACK_TOKEN"])
slackClient.rtm_connect()
slackClient.api_call("channels.join", name="#electrical")
while True:
for message in slackClient.rtm_read():
print(message)
if message["type"] == "team_join":
username = message["user"]["name"]
print(username)
message = "Welcome to the New Ro-Bots Slack, @{}! Please make sure to download this on your phone so we can get your attention! The app is available on both iOS and Android.".format(username)
slackClient.api_call("chat.postMessage", channel="#general",
text=message, username="The New Ro-Bot", link_names = True)
time.sleep(5)
|
f56582dfcd8519a81c40d0df17df54f91f33f665
|
main.py
|
main.py
|
# Copyright (c) 2013 Tom McLoughlin
import socket
# Configuration
myNick = "Bot"
myIdent = "Bot"
myReal = "Bot"
myIRC = "irc.example.org"
myPort = "6667"
myChan = "#example" # only supports a single channel
# Do not edit below this line
socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.connect(myIRC,myPort)
socket.send( 'NICK ',myNick
)socket.send( 'USER ',myIdent, myIdent, myIdent' :',myReal'\r\n' )
socket.send( 'JOIN ',myChan )
# ok, now you can edit ^_^
while True:
net = socket.recv ( 4096 )
if net.find ( 'PING' ) != -1:
irc.send ( 'PONG ' + net.split() [ 1 ] + '\r\n' )
if net.find ( 'cookie' ) != -1:
irc.send ( 'PRIVMSG ',myChan,' :mmmm cookies ;)\r\n' )
print net
|
# Copyright (c) 2013 Tom McLoughlin
import socket
# Configuration
myNick = "Bot"
myIdent = "Bot"
myReal = "Bot"
myIRC = "irc.example.org"
myPort = "6667"
myChan = "#example" # only supports a single channel
# Do not edit below this line
socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.connect(myIRC,myPort)
socket.send( 'NICK ',myNick )
socket.send( 'USER ',myIdent, myIdent, myIdent' :',myReal'\r\n' )
socket.send( 'JOIN ',myChan )
# ok, now you can edit ^_^
while True:
net = socket.recv ( 4096 )
if net.find ( 'PING' ) != -1:
irc.send ( 'PONG ' + net.split() [ 1 ] + '\r\n' )
if net.find ( 'cookie' ) != -1:
irc.send ( 'PRIVMSG ',myChan,' :mmmm cookies ;)\r\n' )
print net
|
Fix typo on line 15
|
Fix typo on line 15
|
Python
|
mit
|
TommehM/pybot
|
# Copyright (c) 2013 Tom McLoughlin
import socket
# Configuration
myNick = "Bot"
myIdent = "Bot"
myReal = "Bot"
myIRC = "irc.example.org"
myPort = "6667"
myChan = "#example" # only supports a single channel
# Do not edit below this line
socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.connect(myIRC,myPort)
socket.send( 'NICK ',myNick
)socket.send( 'USER ',myIdent, myIdent, myIdent' :',myReal'\r\n' )
socket.send( 'JOIN ',myChan )
# ok, now you can edit ^_^
while True:
net = socket.recv ( 4096 )
if net.find ( 'PING' ) != -1:
irc.send ( 'PONG ' + net.split() [ 1 ] + '\r\n' )
if net.find ( 'cookie' ) != -1:
irc.send ( 'PRIVMSG ',myChan,' :mmmm cookies ;)\r\n' )
print netFix typo on line 15
|
# Copyright (c) 2013 Tom McLoughlin
import socket
# Configuration
myNick = "Bot"
myIdent = "Bot"
myReal = "Bot"
myIRC = "irc.example.org"
myPort = "6667"
myChan = "#example" # only supports a single channel
# Do not edit below this line
socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.connect(myIRC,myPort)
socket.send( 'NICK ',myNick )
socket.send( 'USER ',myIdent, myIdent, myIdent' :',myReal'\r\n' )
socket.send( 'JOIN ',myChan )
# ok, now you can edit ^_^
while True:
net = socket.recv ( 4096 )
if net.find ( 'PING' ) != -1:
irc.send ( 'PONG ' + net.split() [ 1 ] + '\r\n' )
if net.find ( 'cookie' ) != -1:
irc.send ( 'PRIVMSG ',myChan,' :mmmm cookies ;)\r\n' )
print net
|
<commit_before># Copyright (c) 2013 Tom McLoughlin
import socket
# Configuration
myNick = "Bot"
myIdent = "Bot"
myReal = "Bot"
myIRC = "irc.example.org"
myPort = "6667"
myChan = "#example" # only supports a single channel
# Do not edit below this line
socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.connect(myIRC,myPort)
socket.send( 'NICK ',myNick
)socket.send( 'USER ',myIdent, myIdent, myIdent' :',myReal'\r\n' )
socket.send( 'JOIN ',myChan )
# ok, now you can edit ^_^
while True:
net = socket.recv ( 4096 )
if net.find ( 'PING' ) != -1:
irc.send ( 'PONG ' + net.split() [ 1 ] + '\r\n' )
if net.find ( 'cookie' ) != -1:
irc.send ( 'PRIVMSG ',myChan,' :mmmm cookies ;)\r\n' )
print net<commit_msg>Fix typo on line 15<commit_after>
|
# Copyright (c) 2013 Tom McLoughlin
import socket
# Configuration
myNick = "Bot"
myIdent = "Bot"
myReal = "Bot"
myIRC = "irc.example.org"
myPort = "6667"
myChan = "#example" # only supports a single channel
# Do not edit below this line
socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.connect(myIRC,myPort)
socket.send( 'NICK ',myNick )
socket.send( 'USER ',myIdent, myIdent, myIdent' :',myReal'\r\n' )
socket.send( 'JOIN ',myChan )
# ok, now you can edit ^_^
while True:
net = socket.recv ( 4096 )
if net.find ( 'PING' ) != -1:
irc.send ( 'PONG ' + net.split() [ 1 ] + '\r\n' )
if net.find ( 'cookie' ) != -1:
irc.send ( 'PRIVMSG ',myChan,' :mmmm cookies ;)\r\n' )
print net
|
# Copyright (c) 2013 Tom McLoughlin
import socket
# Configuration
myNick = "Bot"
myIdent = "Bot"
myReal = "Bot"
myIRC = "irc.example.org"
myPort = "6667"
myChan = "#example" # only supports a single channel
# Do not edit below this line
socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.connect(myIRC,myPort)
socket.send( 'NICK ',myNick
)socket.send( 'USER ',myIdent, myIdent, myIdent' :',myReal'\r\n' )
socket.send( 'JOIN ',myChan )
# ok, now you can edit ^_^
while True:
net = socket.recv ( 4096 )
if net.find ( 'PING' ) != -1:
irc.send ( 'PONG ' + net.split() [ 1 ] + '\r\n' )
if net.find ( 'cookie' ) != -1:
irc.send ( 'PRIVMSG ',myChan,' :mmmm cookies ;)\r\n' )
print netFix typo on line 15# Copyright (c) 2013 Tom McLoughlin
import socket
# Configuration
myNick = "Bot"
myIdent = "Bot"
myReal = "Bot"
myIRC = "irc.example.org"
myPort = "6667"
myChan = "#example" # only supports a single channel
# Do not edit below this line
socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.connect(myIRC,myPort)
socket.send( 'NICK ',myNick )
socket.send( 'USER ',myIdent, myIdent, myIdent' :',myReal'\r\n' )
socket.send( 'JOIN ',myChan )
# ok, now you can edit ^_^
while True:
net = socket.recv ( 4096 )
if net.find ( 'PING' ) != -1:
irc.send ( 'PONG ' + net.split() [ 1 ] + '\r\n' )
if net.find ( 'cookie' ) != -1:
irc.send ( 'PRIVMSG ',myChan,' :mmmm cookies ;)\r\n' )
print net
|
<commit_before># Copyright (c) 2013 Tom McLoughlin
import socket
# Configuration
myNick = "Bot"
myIdent = "Bot"
myReal = "Bot"
myIRC = "irc.example.org"
myPort = "6667"
myChan = "#example" # only supports a single channel
# Do not edit below this line
socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.connect(myIRC,myPort)
socket.send( 'NICK ',myNick
)socket.send( 'USER ',myIdent, myIdent, myIdent' :',myReal'\r\n' )
socket.send( 'JOIN ',myChan )
# ok, now you can edit ^_^
while True:
net = socket.recv ( 4096 )
if net.find ( 'PING' ) != -1:
irc.send ( 'PONG ' + net.split() [ 1 ] + '\r\n' )
if net.find ( 'cookie' ) != -1:
irc.send ( 'PRIVMSG ',myChan,' :mmmm cookies ;)\r\n' )
print net<commit_msg>Fix typo on line 15<commit_after># Copyright (c) 2013 Tom McLoughlin
import socket
# Configuration
myNick = "Bot"
myIdent = "Bot"
myReal = "Bot"
myIRC = "irc.example.org"
myPort = "6667"
myChan = "#example" # only supports a single channel
# Do not edit below this line
socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
socket.connect(myIRC,myPort)
socket.send( 'NICK ',myNick )
socket.send( 'USER ',myIdent, myIdent, myIdent' :',myReal'\r\n' )
socket.send( 'JOIN ',myChan )
# ok, now you can edit ^_^
while True:
net = socket.recv ( 4096 )
if net.find ( 'PING' ) != -1:
irc.send ( 'PONG ' + net.split() [ 1 ] + '\r\n' )
if net.find ( 'cookie' ) != -1:
irc.send ( 'PRIVMSG ',myChan,' :mmmm cookies ;)\r\n' )
print net
|
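Even with the one-line fix recorded above, the handshake in this record would still not run: socket.send() accepts a single string (plus optional flags) rather than several comma-separated arguments, socket.connect() needs one (host, port) tuple with an integer port, the USER line even places a string literal directly after a variable name (a syntax error), and the loop calls irc.send() on a name that is never defined. A working Python 2-style sketch of those calls, assuming the same configuration variables and that each IRC line should end in \r\n (binding the connection to irc also avoids shadowing the socket module and matches the existing irc.send calls):

import socket

irc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
irc.connect((myIRC, int(myPort)))       # connect() takes a single (host, port) tuple; port must be an int
irc.send('NICK ' + myNick + '\r\n')     # send() takes one string, so build it by concatenation
irc.send('USER %s %s %s :%s\r\n' % (myIdent, myIdent, myIdent, myReal))
irc.send('JOIN ' + myChan + '\r\n')

while True:
    net = irc.recv(4096)
    if net.find('PING') != -1:
        irc.send('PONG ' + net.split()[1] + '\r\n')
    if net.find('cookie') != -1:
        irc.send('PRIVMSG ' + myChan + ' :mmmm cookies ;)\r\n')
    print net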
39650fe82bea3fc2bc036b8292f6f0b783b2b4d6
|
ecmd-core/pyapi/init/__init__.py
|
ecmd-core/pyapi/init/__init__.py
|
# import the right SWIG module depending on Python version
from sys import version_info
from sys import path as sys_path
from os import path as os_path
if version_info[0] >= 3:
sys_path.insert(0, os_path.join(os_path.dirname(__file__), "python3"))
from .python3 import *
else:
sys_path.insert(0, os_path.join(os_path.dirname(__file__), "python2"))
from .python2 import *
del sys_path, os_path, version_info
|
# import the right SWIG module depending on Python version
from sys import version_info
from sys import path as sys_path
from os import path as os_path
if version_info[0] >= 3:
sys_path.append(os_path.join(os_path.dirname(__file__), "python3"))
from .python3 import *
else:
sys_path.append(os_path.join(os_path.dirname(__file__), "python2"))
from .python2 import *
del sys_path, os_path, version_info
|
Append version specific path to sys.path rather than prepend
|
pyapi: Append version specific path to sys.path rather than prepend
Other code might depend on sys.path[0] being the path of the executed
script, and there is no need for our new path to be at the front.
|
Python
|
apache-2.0
|
open-power/eCMD,open-power/eCMD,open-power/eCMD,open-power/eCMD,open-power/eCMD
|
# import the right SWIG module depending on Python version
from sys import version_info
from sys import path as sys_path
from os import path as os_path
if version_info[0] >= 3:
sys_path.insert(0, os_path.join(os_path.dirname(__file__), "python3"))
from .python3 import *
else:
sys_path.insert(0, os_path.join(os_path.dirname(__file__), "python2"))
from .python2 import *
del sys_path, os_path, version_info
pyapi: Append version specific path to sys.path rather than prepend
Other code might depend on sys.path[0] being the path of the executed
script, and there is no need for our new path to be at the front.
|
# import the right SWIG module depending on Python version
from sys import version_info
from sys import path as sys_path
from os import path as os_path
if version_info[0] >= 3:
sys_path.append(os_path.join(os_path.dirname(__file__), "python3"))
from .python3 import *
else:
sys_path.append(os_path.join(os_path.dirname(__file__), "python2"))
from .python2 import *
del sys_path, os_path, version_info
|
<commit_before># import the right SWIG module depending on Python version
from sys import version_info
from sys import path as sys_path
from os import path as os_path
if version_info[0] >= 3:
sys_path.insert(0, os_path.join(os_path.dirname(__file__), "python3"))
from .python3 import *
else:
sys_path.insert(0, os_path.join(os_path.dirname(__file__), "python2"))
from .python2 import *
del sys_path, os_path, version_info
<commit_msg>pyapi: Append version specific path to sys.path rather than prepend
Other code might depend on sys.path[0] being the path of the executed
script, and there is no need for our new path to be at the front.<commit_after>
|
# import the right SWIG module depending on Python version
from sys import version_info
from sys import path as sys_path
from os import path as os_path
if version_info[0] >= 3:
sys_path.append(os_path.join(os_path.dirname(__file__), "python3"))
from .python3 import *
else:
sys_path.append(os_path.join(os_path.dirname(__file__), "python2"))
from .python2 import *
del sys_path, os_path, version_info
|
# import the right SWIG module depending on Python version
from sys import version_info
from sys import path as sys_path
from os import path as os_path
if version_info[0] >= 3:
sys_path.insert(0, os_path.join(os_path.dirname(__file__), "python3"))
from .python3 import *
else:
sys_path.insert(0, os_path.join(os_path.dirname(__file__), "python2"))
from .python2 import *
del sys_path, os_path, version_info
pyapi: Append version specific path to sys.path rather than prepend
Other code might depend on sys.path[0] being the path of the executed
script, and there is no need for our new path to be at the front.# import the right SWIG module depending on Python version
from sys import version_info
from sys import path as sys_path
from os import path as os_path
if version_info[0] >= 3:
sys_path.append(os_path.join(os_path.dirname(__file__), "python3"))
from .python3 import *
else:
sys_path.append(os_path.join(os_path.dirname(__file__), "python2"))
from .python2 import *
del sys_path, os_path, version_info
|
<commit_before># import the right SWIG module depending on Python version
from sys import version_info
from sys import path as sys_path
from os import path as os_path
if version_info[0] >= 3:
sys_path.insert(0, os_path.join(os_path.dirname(__file__), "python3"))
from .python3 import *
else:
sys_path.insert(0, os_path.join(os_path.dirname(__file__), "python2"))
from .python2 import *
del sys_path, os_path, version_info
<commit_msg>pyapi: Append version specific path to sys.path rather than prepend
Other code might depend on sys.path[0] being the path of the executed
script, and there is no need for our new path to be at the front.<commit_after># import the right SWIG module depending on Python version
from sys import version_info
from sys import path as sys_path
from os import path as os_path
if version_info[0] >= 3:
sys_path.append(os_path.join(os_path.dirname(__file__), "python3"))
from .python3 import *
else:
sys_path.append(os_path.join(os_path.dirname(__file__), "python2"))
from .python2 import *
del sys_path, os_path, version_info
|
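A minimal sketch, separate from the record above and using a made-up path, of why append matters here: insert(0, ...) displaces sys.path[0], which normally holds the directory of the executed script, while append leaves it untouched (the commit message says os.path, but the list being modified is sys.path).
import sys
print(sys.path[0])                        # directory of the running script
sys.path.append("/opt/pyapi/python3")     # hypothetical path; sys.path[0] is unchanged
sys.path.insert(0, "/opt/pyapi/python3")  # now sys.path[0] is the library path instead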
52abe8ef49f77ce859cba0a9042ea5761fcbcd90
|
fusionpy/__init__.py
|
fusionpy/__init__.py
|
#!/usr/bin/python
from __future__ import print_function
__all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester']
class FusionError(IOError):
def __init__(self, response, request_body=None, message=None, url=None):
if message is None:
message = ""
if url is not None:
message = "Requested " + url + "\n"
if request_body is not None:
message += request_body
message += "Status %d\n\n%s" % (response.status, response.data)
IOError.__init__(self, message)
self.response = response
self.url = url
|
#!/usr/bin/python
from __future__ import print_function
__all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester']
class FusionError(IOError):
def __init__(self, response, request_body=None, message=None, url=None):
"""
:param response: The HTTP response, having attributes .body and .status (or str or unicode)
:param request_body: The HTTP request body that precipitated this error
:param message: Any text to go along with this
:param url: The URL requested
"""
if response.__class__ is str or response.__class__ is unicode:
if message is None:
message = response
else:
message += response
response = None
if message is None:
message = ""
if url is not None:
message = "Requested " + url + "\n"
if request_body is not None:
message += request_body
if response is not None:
message += "Status %d\n\n%s" % (response.status, response.data)
IOError.__init__(self, message)
self.response = response
self.url = url
|
Deal with strings in the first param to the FusionError constructor
|
Deal with strings in the first param to the FusionError constructor
|
Python
|
mit
|
ke4roh/fusionpy
|
#!/usr/bin/python
from __future__ import print_function
__all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester']
class FusionError(IOError):
def __init__(self, response, request_body=None, message=None, url=None):
if message is None:
message = ""
if url is not None:
message = "Requested " + url + "\n"
if request_body is not None:
message += request_body
message += "Status %d\n\n%s" % (response.status, response.data)
IOError.__init__(self, message)
self.response = response
self.url = url
Deal with strings in the first param to the FusionError constructor
|
#!/usr/bin/python
from __future__ import print_function
__all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester']
class FusionError(IOError):
def __init__(self, response, request_body=None, message=None, url=None):
"""
:param response: The HTTP response, having attributes .body and .status (or str or unicode)
:param request_body: The HTTP request body that precipitated this error
:param message: Any text to go along with this
:param url: The URL requested
"""
if response.__class__ is str or response.__class__ is unicode:
if message is None:
message = response
else:
message += response
response = None
if message is None:
message = ""
if url is not None:
message = "Requested " + url + "\n"
if request_body is not None:
message += request_body
if response is not None:
message += "Status %d\n\n%s" % (response.status, response.data)
IOError.__init__(self, message)
self.response = response
self.url = url
|
<commit_before>#!/usr/bin/python
from __future__ import print_function
__all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester']
class FusionError(IOError):
def __init__(self, response, request_body=None, message=None, url=None):
if message is None:
message = ""
if url is not None:
message = "Requested " + url + "\n"
if request_body is not None:
message += request_body
message += "Status %d\n\n%s" % (response.status, response.data)
IOError.__init__(self, message)
self.response = response
self.url = url
<commit_msg>Deal with strings in the first param to the FusionError constructor<commit_after>
|
#!/usr/bin/python
from __future__ import print_function
__all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester']
class FusionError(IOError):
def __init__(self, response, request_body=None, message=None, url=None):
"""
:param response: The HTTP response, having attributes .body and .status (or str or unicode)
:param request_body: The HTTP request body that precipitated this error
:param message: Any text to go along with this
:param url: The URL requested
"""
if response.__class__ is str or response.__class__ is unicode:
if message is None:
message = response
else:
message += response
response = None
if message is None:
message = ""
if url is not None:
message = "Requested " + url + "\n"
if request_body is not None:
message += request_body
if response is not None:
message += "Status %d\n\n%s" % (response.status, response.data)
IOError.__init__(self, message)
self.response = response
self.url = url
|
#!/usr/bin/python
from __future__ import print_function
__all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester']
class FusionError(IOError):
def __init__(self, response, request_body=None, message=None, url=None):
if message is None:
message = ""
if url is not None:
message = "Requested " + url + "\n"
if request_body is not None:
message += request_body
message += "Status %d\n\n%s" % (response.status, response.data)
IOError.__init__(self, message)
self.response = response
self.url = url
Deal with strings in the first param to the FusionError constructor#!/usr/bin/python
from __future__ import print_function
__all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester']
class FusionError(IOError):
def __init__(self, response, request_body=None, message=None, url=None):
"""
:param response: The HTTP response, having attributes .body and .status (or str or unicode)
:param request_body: The HTTP request body that precipitated this error
:param message: Any text to go along with this
:param url: The URL requested
"""
if response.__class__ is str or response.__class__ is unicode:
if message is None:
message = response
else:
message += response
response = None
if message is None:
message = ""
if url is not None:
message = "Requested " + url + "\n"
if request_body is not None:
message += request_body
if response is not None:
message += "Status %d\n\n%s" % (response.status, response.data)
IOError.__init__(self, message)
self.response = response
self.url = url
|
<commit_before>#!/usr/bin/python
from __future__ import print_function
__all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester']
class FusionError(IOError):
def __init__(self, response, request_body=None, message=None, url=None):
if message is None:
message = ""
if url is not None:
message = "Requested " + url + "\n"
if request_body is not None:
message += request_body
message += "Status %d\n\n%s" % (response.status, response.data)
IOError.__init__(self, message)
self.response = response
self.url = url
<commit_msg>Deal with strings in the first param to the FusionError constructor<commit_after>#!/usr/bin/python
from __future__ import print_function
__all__ = ['Fusion', 'FusionCollection', 'FusionError', 'FusionRequester', 'HttpFusionRequester']
class FusionError(IOError):
def __init__(self, response, request_body=None, message=None, url=None):
"""
:param response: The HTTP response, having attributes .body and .status (or str or unicode)
:param request_body: The HTTP request body that percipitated this error
:param message: Any text to go along with this
:param url: The URL requested
"""
if response.__class__ is str or response.__class__ is unicode:
if message is None:
message = response
else:
message += response
response = None
if message is None:
message = ""
if url is not None:
message = "Requested " + url + "\n"
if request_body is not None:
message += request_body
if response is not None:
message += "Status %d\n\n%s" % (response.status, response.data)
IOError.__init__(self, message)
self.response = response
self.url = url
|
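A small usage sketch with made-up values, targeting Python 2 like the unicode check above, of the behaviour the revised constructor adds: a plain string in the first position is folded into the error message and .response ends up as None.
from fusionpy import FusionError
err = FusionError("Fusion ping failed: connection refused")  # no HTTP response object available
print(err.response)  # None - the bare string became the message instead
print(str(err))      # Fusion ping failed: connection refused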
709c45c39007692eecfcaa814ebb711b388670b1
|
functional_tests.py
|
functional_tests.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Here are all the functional tests for the ludobox-ui application.
Those test are based on real life data provided by DCALK team members.
The objective of those test is to check that valid, real life input should
produce valid, real life output. We are not testing for corner cases but only
for actual production cases.
"""
# Python 3 compatibility
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
def test_dummy1():
assert True
def test_dummy2():
assert 0 == 1
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Here are all the functional tests for the ludobox-ui application.
Those test are based on real life data provided by DCALK team members.
The objective of those test is to check that valid, real life input should
produce valid, real life output. We are not testing for corner cases but only
for actual production cases.
"""
# Python 3 compatibility
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
# ludocore
from ludocore import generate_all
from ludocore import clean
# TODO work in a temp dir for convenience
# TODO do more in depth testing : output, dir and file created
def test_generate_all_on_hackathon_data():
# First we clean the rep
assert clean(None)
# Then we generate everything
assert generate_all(None)
|
Add real life test to functional tests... problem the actual code is at the moment not able to run on custom locations.
|
Add real life test to functional tests... problem the actual code is at the moment not able to run on custom locations.
|
Python
|
agpl-3.0
|
ludobox/ludobox,ludobox/ludobox-ui,ludobox/ludobox,ludobox/ludobox,ludobox/ludobox,ludobox/ludobox-ui,ludobox/ludobox-ui
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Here are all the functional tests for the ludobox-ui application.
Those test are based on real life data provided by DCALK team members.
The objective of those test is to check that valid, real life input should
produce valid, real life output. We are not testing for corner cases but only
for actual production cases.
"""
# Python 3 compatibility
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
def test_dummy1():
assert True
def test_dummy2():
assert 0 == 1Add real life test to functional tests... problem the actual code is at the moment not able to run on custom locations.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Here are all the functional tests for the ludobox-ui application.
Those test are based on real life data provided by DCALK team members.
The objective of those test is to check that valid, real life input should
produce valid, real life output. We are not testing for corner cases but only
for actual production cases.
"""
# Python 3 compatibility
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
# ludocore
from ludocore import generate_all
from ludocore import clean
# TODO work in a temp dir for convenience
# TODO do more in depth testing : output, dir and file created
def test_generate_all_on_hackathon_data():
# First we clean the rep
assert clean(None)
# Then we generate everything
assert generate_all(None)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Here are all the functional tests for the ludobox-ui application.
Those test are based on real life data provided by DCALK team members.
The objective of those test is to check that valid, real life input should
produce valid, real life output. We are not testing for corner cases but only
for actual production cases.
"""
# Python 3 compatibility
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
def test_dummy1():
assert True
def test_dummy2():
assert 0 == 1<commit_msg>Add real life test to functional tests... problem the actual code is at the moment not able to run on custom locations.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Here are all the functional tests for the ludobox-ui application.
Those test are based on real life data provided by DCALK team members.
The objective of those test is to check that valid, real life input should
produce valid, real life output. We are not testing for corner cases but only
for actual production cases.
"""
# Python 3 compatibility
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
# ludocore
from ludocore import generate_all
from ludocore import clean
# TODO work in a temp dir for convenience
# TODO do more in depth testing : output, dir and file created
def test_generate_all_on_hackathon_data():
# First we clean the rep
assert clean(None)
# Then we generate everything
assert generate_all(None)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Here are all the functional tests for the ludobox-ui application.
Those test are based on real life data provided by DCALK team members.
The objective of those test is to check that valid, real life input should
produce valid, real life output. We are not testing for corner cases but only
for actual production cases.
"""
# Python 3 compatibility
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
def test_dummy1():
assert True
def test_dummy2():
assert 0 == 1Add real life test to functional tests... problem the actual code is at the moment not able to run on custom locations.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Here are all the functional tests for the ludobox-ui application.
Those test are based on real life data provided by DCALK team members.
The objective of those test is to check that valid, real life input should
produce valid, real life output. We are not testing for corner cases but only
for actual production cases.
"""
# Python 3 compatibility
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
# ludocore
from ludocore import generate_all
from ludocore import clean
# TODO work in a temp dir for convenience
# TODO do more in depth testing : output, dir and file created
def test_generate_all_on_hackathon_data():
# First we clean the rep
assert clean(None)
# Then we generate everything
assert generate_all(None)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Here are all the functional tests for the ludobox-ui application.
Those test are based on real life data provided by DCALK team members.
The objective of those test is to check that valid, real life input should
produce valid, real life output. We are not testing for corner cases but only
for actual production cases.
"""
# Python 3 compatibility
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
def test_dummy1():
assert True
def test_dummy2():
assert 0 == 1<commit_msg>Add real life test to functional tests... problem the actual code is at the moment not able to run on custom locations.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Here are all the functional tests for the ludobox-ui application.
Those test are based on real life data provided by DCALK team members.
The objective of those test is to check that valid, real life input should
produce valid, real life output. We are not testing for corner cases but only
for actual production cases.
"""
# Python 3 compatibility
from __future__ import unicode_literals
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
# ludocore
from ludocore import generate_all
from ludocore import clean
# TODO work in a temp dir for convenience
# TODO do more in depth testing : output, dir and file created
def test_generate_all_on_hackathon_data():
# First we clean the rep
assert clean(None)
# Then we generate everything
assert generate_all(None)
|
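A hedged variant of the test above, assuming (not verified against ludocore's API) that clean and generate_all accept a target directory; that would address the temp-dir TODO via pytest's tmpdir fixture.
from ludocore import clean, generate_all
def test_generate_all_in_tmp_dir(tmpdir):
    # tmpdir is pytest's throwaway directory; passing it assumes the ludocore
    # functions take an output location, which the None arguments above leave open
    assert clean(str(tmpdir))
    assert generate_all(str(tmpdir))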
3b66c6d8e2945d783904ba0f220772861e8e20ef
|
linguine/ops/StanfordCoreNLP.py
|
linguine/ops/StanfordCoreNLP.py
|
#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar')
coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar')
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'}, corenlp_jars=[coreNLPPath, coreNLPModelsPath])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
|
#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'},
corenlp_jars=[os.path.join(os.path.dirname(__file__), '../../lib/*')])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
|
Allow relative paths for corenlp deps
|
Allow relative paths for corenlp deps
|
Python
|
mit
|
rigatoni/linguine-python,Pastafarians/linguine-python
|
#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar')
coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar')
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'}, corenlp_jars=[coreNLPPath, coreNLPModelsPath])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
Allow relative paths for corenlp deps
|
#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'},
corenlp_jars=[os.path.join(os.path.dirname(__file__), '../../lib/*')])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
|
<commit_before>#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar')
coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar')
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'}, corenlp_jars=[coreNLPPath, coreNLPModelsPath])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
<commit_msg>Allow relative paths for corenlp deps<commit_after>
|
#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'},
corenlp_jars=[os.path.join(os.path.dirname(__file__), '../../lib/*')])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
|
#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar')
coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar')
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'}, corenlp_jars=[coreNLPPath, coreNLPModelsPath])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
Allow relative paths for corenlp deps#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'},
corenlp_jars=[os.path.join(os.path.dirname(__file__), '../../lib/*')])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
|
<commit_before>#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar')
coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar')
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'}, corenlp_jars=[coreNLPPath, coreNLPModelsPath])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
<commit_msg>Allow relative paths for corenlp deps<commit_after>#!/usr/bin/env python
import os
"""
Performs some core NLP operations as a proof of concept for the library.
"""
from stanford_corenlp_pywrapper import CoreNLP
class StanfordCoreNLP:
proc = None
"""
When the JSON segments return from the CoreNLP library, they
separate the data acquired from each word into their own element.
For readability's sake, it would be nice to pair all of the information
for a given word with that word, making a list of words with their
part of speech tags
"""
def jsonCleanup(self, data, analysisTypes):
for corpus in data:
res = StanfordCoreNLP.proc.parse_doc(corpus.contents)
print(str(res));
for sentence in res["sentences"]:
words = []
for index, token in enumerate(sentence["tokens"]):
word = {}
word["token"] = sentence["tokens"][index]
for atype in analysisTypes:
word[atype] = sentence[atype][index]
words.append(word)
return words
def __init__(self, analysisType):
self.analysisType = analysisType
if StanfordCoreNLP.proc == None:
StanfordCoreNLP.proc = CoreNLP(configdict={'annotators':'tokenize, ssplit, pos, lemma, ner'},
corenlp_jars=[os.path.join(os.path.dirname(__file__), '../../lib/*')])
def run(self, data):
return self.jsonCleanup(data, self.analysisType)
|
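An illustrative check, with made-up paths, of what the relative wildcard in the revised constructor expands to: building the jar pattern from __file__ keeps it valid no matter which working directory the process was started from.
import glob
import os
lib_glob = os.path.join(os.path.dirname(__file__), '../../lib/*')
print(lib_glob)             # e.g. /srv/linguine/linguine/ops/../../lib/* (path shown is illustrative)
print(glob.glob(lib_glob))  # the CoreNLP jars actually present under lib/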
2add786f3173754f95900e0027cd78934a454bcf
|
salt/modules/mysql.py
|
salt/modules/mysql.py
|
'''
Module to provide MySQL compatibility to salt.
In order to connect to MySQL, certain configuration is required
in /etc/salt/minion on the relevant minions. Some sample configs
might look like:
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
'''
import MySQLdb
__opts__ = {}
def _connect():
'''
wrap authentication credentials here
'''
hostname = __opts__['mysql.host']
username = __opts__['mysql.user']
password = __opts__['mysql.pass']
dbport = __opts__['mysql.port']
dbname = __opts__['mysql.db']
db = MySQLdb.connect(
hostname,
username,
password,
dbname,
dbport,
)
db.autocommit(True)
return db
def status():
'''
Return the status of a MySQL server using the output
from the SHOW STATUS query.
CLI Example:
salt '*' mysql.status
'''
ret = {}
db = _connect()
cur = db.cursor()
cur.execute('SHOW STATUS')
for i in xrange(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version():
'''
Return the version of a MySQL server using the output
from the SELECT VERSION() query.
CLI Example:
salt '*' mysql.version
'''
db = _connect()
cur = db.cursor()
cur.execute('SELECT VERSION()')
row = cur.fetchone()
return row
|
'''
Module to provide MySQL compatibility to salt.
In order to connect to MySQL, certain configuration is required
in /etc/salt/minion on the relevant minions. Some sample configs
might look like:
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
'''
import MySQLdb
__opts__ = {}
def connect():
'''
wrap authentication credentials here
'''
hostname = __opts__['mysql.host']
username = __opts__['mysql.user']
password = __opts__['mysql.pass']
dbport = __opts__['mysql.port']
dbname = __opts__['mysql.db']
db = MySQLdb.connect(
hostname,
username,
password,
dbname,
dbport,
)
db.autocommit(True)
return db
def status():
'''
Return the status of a MySQL server using the output
from the SHOW STATUS query.
CLI Example:
salt '*' mysql.status
'''
ret = {}
db = connect()
cur = db.cursor()
cur.execute('SHOW STATUS')
for i in xrange(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version():
'''
Return the version of a MySQL server using the output
from the SELECT VERSION() query.
CLI Example:
salt '*' mysql.version
'''
db = connect()
cur = db.cursor()
cur.execute('SELECT VERSION()')
row = cur.fetchone()
return row
|
Change _connect to connect, so that it can be used from within other modules
|
Change _connect to connect, so that it can be used from within other modules
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Module to provide MySQL compatibility to salt.
In order to connect to MySQL, certain configuration is required
in /etc/salt/minion on the relevant minions. Some sample configs
might look like:
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
'''
import MySQLdb
__opts__ = {}
def _connect():
'''
wrap authentication credentials here
'''
hostname = __opts__['mysql.host']
username = __opts__['mysql.user']
password = __opts__['mysql.pass']
dbport = __opts__['mysql.port']
dbname = __opts__['mysql.db']
db = MySQLdb.connect(
hostname,
username,
password,
dbname,
dbport,
)
db.autocommit(True)
return db
def status():
'''
Return the status of a MySQL server using the output
from the SHOW STATUS query.
CLI Example:
salt '*' mysql.status
'''
ret = {}
db = _connect()
cur = db.cursor()
cur.execute('SHOW STATUS')
for i in xrange(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version():
'''
Return the version of a MySQL server using the output
from the SELECT VERSION() query.
CLI Example:
salt '*' mysql.version
'''
db = _connect()
cur = db.cursor()
cur.execute('SELECT VERSION()')
row = cur.fetchone()
return row
Change _connect to connect, so that it can be used from within other modules
|
'''
Module to provide MySQL compatibility to salt.
In order to connect to MySQL, certain configuration is required
in /etc/salt/minion on the relevant minions. Some sample configs
might look like:
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
'''
import MySQLdb
__opts__ = {}
def connect():
'''
wrap authentication credentials here
'''
hostname = __opts__['mysql.host']
username = __opts__['mysql.user']
password = __opts__['mysql.pass']
dbport = __opts__['mysql.port']
dbname = __opts__['mysql.db']
db = MySQLdb.connect(
hostname,
username,
password,
dbname,
dbport,
)
db.autocommit(True)
return db
def status():
'''
Return the status of a MySQL server using the output
from the SHOW STATUS query.
CLI Example:
salt '*' mysql.status
'''
ret = {}
db = connect()
cur = db.cursor()
cur.execute('SHOW STATUS')
for i in xrange(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version():
'''
Return the version of a MySQL server using the output
from the SELECT VERSION() query.
CLI Example:
salt '*' mysql.version
'''
db = connect()
cur = db.cursor()
cur.execute('SELECT VERSION()')
row = cur.fetchone()
return row
|
<commit_before>'''
Module to provide MySQL compatibility to salt.
In order to connect to MySQL, certain configuration is required
in /etc/salt/minion on the relevant minions. Some sample configs
might look like:
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
'''
import MySQLdb
__opts__ = {}
def _connect():
'''
wrap authentication credentials here
'''
hostname = __opts__['mysql.host']
username = __opts__['mysql.user']
password = __opts__['mysql.pass']
dbport = __opts__['mysql.port']
dbname = __opts__['mysql.db']
db = MySQLdb.connect(
hostname,
username,
password,
dbname,
dbport,
)
db.autocommit(True)
return db
def status():
'''
Return the status of a MySQL server using the output
from the SHOW STATUS query.
CLI Example:
salt '*' mysql.status
'''
ret = {}
db = _connect()
cur = db.cursor()
cur.execute('SHOW STATUS')
for i in xrange(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version():
'''
Return the version of a MySQL server using the output
from the SELECT VERSION() query.
CLI Example:
salt '*' mysql.version
'''
db = _connect()
cur = db.cursor()
cur.execute('SELECT VERSION()')
row = cur.fetchone()
return row
<commit_msg>Change _connect to connect, so that it can be used from within other modules<commit_after>
|
'''
Module to provide MySQL compatibility to salt.
In order to connect to MySQL, certain configuration is required
in /etc/salt/minion on the relevant minions. Some sample configs
might look like:
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
'''
import MySQLdb
__opts__ = {}
def connect():
'''
wrap authentication credentials here
'''
hostname = __opts__['mysql.host']
username = __opts__['mysql.user']
password = __opts__['mysql.pass']
dbport = __opts__['mysql.port']
dbname = __opts__['mysql.db']
db = MySQLdb.connect(
hostname,
username,
password,
dbname,
dbport,
)
db.autocommit(True)
return db
def status():
'''
Return the status of a MySQL server using the output
from the SHOW STATUS query.
CLI Example:
salt '*' mysql.status
'''
ret = {}
db = connect()
cur = db.cursor()
cur.execute('SHOW STATUS')
for i in xrange(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version():
'''
Return the version of a MySQL server using the output
from the SELECT VERSION() query.
CLI Example:
salt '*' mysql.version
'''
db = connect()
cur = db.cursor()
cur.execute('SELECT VERSION()')
row = cur.fetchone()
return row
|
'''
Module to provide MySQL compatibility to salt.
In order to connect to MySQL, certain configuration is required
in /etc/salt/minion on the relevant minions. Some sample configs
might look like:
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
'''
import MySQLdb
__opts__ = {}
def _connect():
'''
wrap authentication credentials here
'''
hostname = __opts__['mysql.host']
username = __opts__['mysql.user']
password = __opts__['mysql.pass']
dbport = __opts__['mysql.port']
dbname = __opts__['mysql.db']
db = MySQLdb.connect(
hostname,
username,
password,
dbname,
dbport,
)
db.autocommit(True)
return db
def status():
'''
Return the status of a MySQL server using the output
from the SHOW STATUS query.
CLI Example:
salt '*' mysql.status
'''
ret = {}
db = _connect()
cur = db.cursor()
cur.execute('SHOW STATUS')
for i in xrange(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version():
'''
Return the version of a MySQL server using the output
from the SELECT VERSION() query.
CLI Example:
salt '*' mysql.version
'''
db = _connect()
cur = db.cursor()
cur.execute('SELECT VERSION()')
row = cur.fetchone()
return row
Change _connect to connect, so that it can be used from within other modules'''
Module to provide MySQL compatibility to salt.
In order to connect to MySQL, certain configuration is required
in /etc/salt/minion on the relevant minions. Some sample configs
might look like:
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
'''
import MySQLdb
__opts__ = {}
def connect():
'''
wrap authentication credentials here
'''
hostname = __opts__['mysql.host']
username = __opts__['mysql.user']
password = __opts__['mysql.pass']
dbport = __opts__['mysql.port']
dbname = __opts__['mysql.db']
db = MySQLdb.connect(
hostname,
username,
password,
dbname,
dbport,
)
db.autocommit(True)
return db
def status():
'''
Return the status of a MySQL server using the output
from the SHOW STATUS query.
CLI Example:
salt '*' mysql.status
'''
ret = {}
db = connect()
cur = db.cursor()
cur.execute('SHOW STATUS')
for i in xrange(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version():
'''
Return the version of a MySQL server using the output
from the SELECT VERSION() query.
CLI Example:
salt '*' mysql.version
'''
db = connect()
cur = db.cursor()
cur.execute('SELECT VERSION()')
row = cur.fetchone()
return row
|
<commit_before>'''
Module to provide MySQL compatibility to salt.
In order to connect to MySQL, certain configuration is required
in /etc/salt/minion on the relevant minions. Some sample configs
might look like:
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
'''
import MySQLdb
__opts__ = {}
def _connect():
'''
wrap authentication credentials here
'''
hostname = __opts__['mysql.host']
username = __opts__['mysql.user']
password = __opts__['mysql.pass']
dbport = __opts__['mysql.port']
dbname = __opts__['mysql.db']
db = MySQLdb.connect(
hostname,
username,
password,
dbname,
dbport,
)
db.autocommit(True)
return db
def status():
'''
Return the status of a MySQL server using the output
from the SHOW STATUS query.
CLI Example:
salt '*' mysql.status
'''
ret = {}
db = _connect()
cur = db.cursor()
cur.execute('SHOW STATUS')
for i in xrange(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version():
'''
Return the version of a MySQL server using the output
from the SELECT VERSION() query.
CLI Example:
salt '*' mysql.version
'''
db = _connect()
cur = db.cursor()
cur.execute('SELECT VERSION()')
row = cur.fetchone()
return row
<commit_msg>Change _connect to connect, so that it can be used from within other modules<commit_after>'''
Module to provide MySQL compatibility to salt.
In order to connect to MySQL, certain configuration is required
in /etc/salt/minion on the relevant minions. Some sample configs
might look like:
mysql.host: 'localhost'
mysql.port: 3306
mysql.user: 'root'
mysql.pass: ''
mysql.db: 'mysql'
'''
import MySQLdb
__opts__ = {}
def connect():
'''
wrap authentication credentials here
'''
hostname = __opts__['mysql.host']
username = __opts__['mysql.user']
password = __opts__['mysql.pass']
dbport = __opts__['mysql.port']
dbname = __opts__['mysql.db']
db = MySQLdb.connect(
hostname,
username,
password,
dbname,
dbport,
)
db.autocommit(True)
return db
def status():
'''
Return the status of a MySQL server using the output
from the SHOW STATUS query.
CLI Example:
salt '*' mysql.status
'''
ret = {}
db = connect()
cur = db.cursor()
cur.execute('SHOW STATUS')
for i in xrange(cur.rowcount):
row = cur.fetchone()
ret[row[0]] = row[1]
return ret
def version():
'''
Return the version of a MySQL server using the output
from the SELECT VERSION() query.
CLI Example:
salt '*' mysql.version
'''
db = connect()
cur = db.cursor()
cur.execute('SELECT VERSION()')
row = cur.fetchone()
return row
|
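A hypothetical companion module, not part of the commit, sketching why the rename matters in Salt: functions whose names begin with an underscore are treated as private and are not published, so only the renamed connect() is reachable from other execution modules through the __salt__ dunder.
def user_exists(name):
    '''
    Hypothetical function living in a sibling execution module.
    '''
    db = __salt__['mysql.connect']()  # cross-module call; 'mysql._connect' was never exposed
    cur = db.cursor()
    cur.execute('SELECT 1 FROM mysql.user WHERE User = %s', (name,))
    return cur.rowcount > 0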
9ddc347b8f44f2c4ac4b74725cfa258abaeb1398
|
blackjack.py
|
blackjack.py
|
import random
class Blackjack:
def __init__(self):
self.deck = range(1, 53)
self.shuffle()
self.deal()
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
self.player = [None, None]
self.dealer = [None, None]
for i in xrange(4):
if i % 2:
self.player[i/2] = self.deck.pop()
else:
self.dealer[i/2] = self.deck.pop()
if __name__ == "__main__":
"""
This is testing only, this module is intended
to be imported.
"""
bj = Blackjack()
print bj.player
print bj.dealer
|
import random
class Blackjack:
def __init__(self):
self.deck = range(1, 53)
self.shuffle()
self.deal()
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
self.player = [None, None]
self.dealer = [None, None]
for i in xrange(4):
if i % 2:
self.player[i/2] = self.deck.pop()
else:
self.dealer[(i-1)/2] = self.deck.pop()
if __name__ == "__main__":
"""
This is testing only, this module is intended
to be imported.
"""
bj = Blackjack()
print bj.player
print bj.dealer
|
Make it so we don't rely on weird rounding in python to assign card to the correct index
|
Make it so we don't rely on weird rounding in python to assign card to the correct index
|
Python
|
mit
|
JustinTulloss/blackjack
|
import random
class Blackjack:
def __init__(self):
self.deck = range(1, 53)
self.shuffle()
self.deal()
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
self.player = [None, None]
self.dealer = [None, None]
for i in xrange(4):
if i % 2:
self.player[i/2] = self.deck.pop()
else:
self.dealer[i/2] = self.deck.pop()
if __name__ == "__main__":
"""
This is testing only, this module is intended
to be imported.
"""
bj = Blackjack()
print bj.player
print bj.dealer
Make it so we don't rely on weird rounding in python to assign card to the correct index
|
import random
class Blackjack:
def __init__(self):
self.deck = range(1, 53)
self.shuffle()
self.deal()
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
self.player = [None, None]
self.dealer = [None, None]
for i in xrange(4):
if i % 2:
self.player[i/2] = self.deck.pop()
else:
self.dealer[(i-1)/2] = self.deck.pop()
if __name__ == "__main__":
"""
This is testing only, this module is intended
to be imported.
"""
bj = Blackjack()
print bj.player
print bj.dealer
|
<commit_before>import random
class Blackjack:
def __init__(self):
self.deck = range(1, 53)
self.shuffle()
self.deal()
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
self.player = [None, None]
self.dealer = [None, None]
for i in xrange(4):
if i % 2:
self.player[i/2] = self.deck.pop()
else:
self.dealer[i/2] = self.deck.pop()
if __name__ == "__main__":
"""
This is testing only, this module is intended
to be imported.
"""
bj = Blackjack()
print bj.player
print bj.dealer
<commit_msg>Make it so we don't rely on weird rounding in python to assign card to the correct index<commit_after>
|
import random
class Blackjack:
def __init__(self):
self.deck = range(1, 53)
self.shuffle()
self.deal()
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
self.player = [None, None]
self.dealer = [None, None]
for i in xrange(4):
if i % 2:
self.player[i/2] = self.deck.pop()
else:
self.dealer[(i-1)/2] = self.deck.pop()
if __name__ == "__main__":
"""
This is testing only, this module is intended
to be imported.
"""
bj = Blackjack()
print bj.player
print bj.dealer
|
import random
class Blackjack:
def __init__(self):
self.deck = range(1, 53)
self.shuffle()
self.deal()
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
self.player = [None, None]
self.dealer = [None, None]
for i in xrange(4):
if i % 2:
self.player[i/2] = self.deck.pop()
else:
self.dealer[i/2] = self.deck.pop()
if __name__ == "__main__":
"""
This is testing only, this module is intended
to be imported.
"""
bj = Blackjack()
print bj.player
print bj.dealer
Make it so we don't rely on weird rounding in python to assign card to the correct indeximport random
class Blackjack:
def __init__(self):
self.deck = range(1, 53)
self.shuffle()
self.deal()
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
self.player = [None, None]
self.dealer = [None, None]
for i in xrange(4):
if i % 2:
self.player[i/2] = self.deck.pop()
else:
self.dealer[(i-1)/2] = self.deck.pop()
if __name__ == "__main__":
"""
This is testing only, this module is intended
to be imported.
"""
bj = Blackjack()
print bj.player
print bj.dealer
|
<commit_before>import random
class Blackjack:
def __init__(self):
self.deck = range(1, 53)
self.shuffle()
self.deal()
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
self.player = [None, None]
self.dealer = [None, None]
for i in xrange(4):
if i % 2:
self.player[i/2] = self.deck.pop()
else:
self.dealer[i/2] = self.deck.pop()
if __name__ == "__main__":
"""
This is testing only, this module is intended
to be imported.
"""
bj = Blackjack()
print bj.player
print bj.dealer
<commit_msg>Make it so we don't rely on weird rounding in python to assign card to the correct index<commit_after>import random
class Blackjack:
def __init__(self):
self.deck = range(1, 53)
self.shuffle()
self.deal()
def shuffle(self):
random.shuffle(self.deck)
def deal(self):
self.player = [None, None]
self.dealer = [None, None]
for i in xrange(4):
if i % 2:
self.player[i/2] = self.deck.pop()
else:
self.dealer[(i-1)/2] = self.deck.pop()
if __name__ == "__main__":
"""
This is testing only, this module is intended
to be imported.
"""
bj = Blackjack()
print bj.player
print bj.dealer
|
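A quick worked check, purely illustrative, of the two dealer-index formulas under the integer (floor) division the snippet above relies on:
for i in (0, 2):  # the two loop iterations that deal to the dealer
    print("i=%d  i/2 -> %d   (i-1)/2 -> %d" % (i, i // 2, (i - 1) // 2))
# i=0  i/2 -> 0   (i-1)/2 -> -1   (a -1 index wraps around to dealer[1])
# i=2  i/2 -> 1   (i-1)/2 -> 0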
56df83132b6885f8ef753fee42a39daff72f2f12
|
celery_app.py
|
celery_app.py
|
from flask import g
from app import app, app_mongo
from celery import Celery
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_RESULT_BACKEND"])
celery.conf.update(app.config)
task_base = celery.Task
class ContextTask(task_base):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
# Application context for databases
g.mongo = app_mongo
return task_base.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Security Concerns (http://docs.celeryproject.org/en/latest/faq.html#is-celery-dependent-on-pickle)
celery.conf.CELERY_TASK_SERIALIZER = "json"
celery.conf.CELERY_RESULT_SERIALIZER = "json"
|
from flask import g
from app import app, app_mongo
from celery import Celery
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_RESULT_BACKEND"])
celery.conf.update(app.config)
task_base = celery.Task
class ContextTask(task_base):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
# Application context for databases
g.mongo = app_mongo
return task_base.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Security Concerns (http://docs.celeryproject.org/en/latest/faq.html#is-celery-dependent-on-pickle)
celery.conf.CELERY_ACCEPT_CONTENT = ["json", "application/json"]
celery.conf.CELERY_TASK_SERIALIZER = "json"
celery.conf.CELERY_RESULT_SERIALIZER = "json"
|
Add json to accepted celery content
|
Add json to accepted celery content
celery.conf.CELERY_ACCEPT_CONTENT accepts only json
|
Python
|
mit
|
tritanium-industries/TITDev,macalik/TITDev,tritanium-industries/TITDev,macalik/TITDev,tritanium-industries/TITDev,macalik/TITDev
|
from flask import g
from app import app, app_mongo
from celery import Celery
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_RESULT_BACKEND"])
celery.conf.update(app.config)
task_base = celery.Task
class ContextTask(task_base):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
# Application context for databases
g.mongo = app_mongo
return task_base.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Security Concerns (http://docs.celeryproject.org/en/latest/faq.html#is-celery-dependent-on-pickle)
celery.conf.CELERY_TASK_SERIALIZER = "json"
celery.conf.CELERY_RESULT_SERIALIZER = "json"
Add json to accepted celery content
celery.conf.CELERY_ACCEPT_CONTENT accepts only json
|
from flask import g
from app import app, app_mongo
from celery import Celery
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_RESULT_BACKEND"])
celery.conf.update(app.config)
task_base = celery.Task
class ContextTask(task_base):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
# Application context for databases
g.mongo = app_mongo
return task_base.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Security Concerns (http://docs.celeryproject.org/en/latest/faq.html#is-celery-dependent-on-pickle)
celery.conf.CELERY_ACCEPT_CONTENT = ["json", "application/json"]
celery.conf.CELERY_TASK_SERIALIZER = "json"
celery.conf.CELERY_RESULT_SERIALIZER = "json"
|
<commit_before>from flask import g
from app import app, app_mongo
from celery import Celery
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_RESULT_BACKEND"])
celery.conf.update(app.config)
task_base = celery.Task
class ContextTask(task_base):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
# Application context for databases
g.mongo = app_mongo
return task_base.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Security Concerns (http://docs.celeryproject.org/en/latest/faq.html#is-celery-dependent-on-pickle)
celery.conf.CELERY_TASK_SERIALIZER = "json"
celery.conf.CELERY_RESULT_SERIALIZER = "json"
<commit_msg>Add json to accepted celery content
celery.conf.CELERY_ACCEPT_CONTENT accepts only json<commit_after>
|
from flask import g
from app import app, app_mongo
from celery import Celery
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_RESULT_BACKEND"])
celery.conf.update(app.config)
task_base = celery.Task
class ContextTask(task_base):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
# Application context for databases
g.mongo = app_mongo
return task_base.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Security Concerns (http://docs.celeryproject.org/en/latest/faq.html#is-celery-dependent-on-pickle)
celery.conf.CELERY_ACCEPT_CONTENT = ["json", "application/json"]
celery.conf.CELERY_TASK_SERIALIZER = "json"
celery.conf.CELERY_RESULT_SERIALIZER = "json"
|
from flask import g
from app import app, app_mongo
from celery import Celery
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_RESULT_BACKEND"])
celery.conf.update(app.config)
task_base = celery.Task
class ContextTask(task_base):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
# Application context for databases
g.mongo = app_mongo
return task_base.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Security Concerns (http://docs.celeryproject.org/en/latest/faq.html#is-celery-dependent-on-pickle)
celery.conf.CELERY_TASK_SERIALIZER = "json"
celery.conf.CELERY_RESULT_SERIALIZER = "json"
Add json to accepted celery content
celery.conf.CELERY_ACCEPT_CONTENT accepts only jsonfrom flask import g
from app import app, app_mongo
from celery import Celery
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_RESULT_BACKEND"])
celery.conf.update(app.config)
task_base = celery.Task
class ContextTask(task_base):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
# Application context for databases
g.mongo = app_mongo
return task_base.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Security Concerns (http://docs.celeryproject.org/en/latest/faq.html#is-celery-dependent-on-pickle)
celery.conf.CELERY_ACCEPT_CONTENT = ["json", "application/json"]
celery.conf.CELERY_TASK_SERIALIZER = "json"
celery.conf.CELERY_RESULT_SERIALIZER = "json"
|
<commit_before>from flask import g
from app import app, app_mongo
from celery import Celery
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_RESULT_BACKEND"])
celery.conf.update(app.config)
task_base = celery.Task
class ContextTask(task_base):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
# Application context for databases
g.mongo = app_mongo
return task_base.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Security Concerns (http://docs.celeryproject.org/en/latest/faq.html#is-celery-dependent-on-pickle)
celery.conf.CELERY_TASK_SERIALIZER = "json"
celery.conf.CELERY_RESULT_SERIALIZER = "json"
<commit_msg>Add json to accepted celery content
celery.conf.CELERY_ACCEPT_CONTENT accepts only json<commit_after>from flask import g
from app import app, app_mongo
from celery import Celery
celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config["CELERY_RESULT_BACKEND"])
celery.conf.update(app.config)
task_base = celery.Task
class ContextTask(task_base):
abstract = True
def __call__(self, *args, **kwargs):
with app.app_context():
# Application context for databases
g.mongo = app_mongo
return task_base.__call__(self, *args, **kwargs)
celery.Task = ContextTask
# Security Concerns (http://docs.celeryproject.org/en/latest/faq.html#is-celery-dependent-on-pickle)
celery.conf.CELERY_ACCEPT_CONTENT = ["json", "application/json"]
celery.conf.CELERY_TASK_SERIALIZER = "json"
celery.conf.CELERY_RESULT_SERIALIZER = "json"
|
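A hedged producer-side sketch (the task name is a placeholder) of the counterpart to the accept-content setting above: senders should serialize with json as well, since the worker now rejects anything else.
result = some_task.apply_async(args=[1, 2], serializer="json")  # some_task is a placeholder task
print(result.get(timeout=10))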
c09e01d6d7b98d2f2b0a99fea20988d422c1a1bd
|
test_collision/test_worlds.py
|
test_collision/test_worlds.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_collision.test_worlds
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import bullet
class DiscreteDynamicsWorldTestCase(unittest.TestCase):
def setUp(self):
pass
def test_ctor(self):
pass
def tearDown(self):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_collision.test_worlds
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import bullet
class DiscreteDynamicsWorldTestCase(unittest.TestCase):
def setUp(self):
self.solver = bullet.btSequentialImpulseConstraintSolver()
self.cinfo = bullet.btDefaultCollisionConstructionInfo()
self.collision_config = \
bullet.btDefaultCollisionConfiguration(self.cinfo)
self.broadphase = bullet.btDbvtBroadphase()
self.dispatcher = bullet.btCollisionDispatcher(self.collision_config)
self.world = bullet.btDiscreteDynamicsWorld(
self.dispatcher,
self.broadphase,
self.solver,
self.collision_config
)
self.time_step = 1.0/60
def test_ctor(self):
pass
def test_step(self):
for i in range(120):
self.world.step_simulation(self.time_step)
def test_sync_states(self):
for i in range(120):
self.world.step_simulation(self.time_step)
self.world.synchronize_motion_states()
def test_gravity(self):
self.world.set_gravity(bullet.btVector3(0, -9.8, 0))
gravity = self.world.get_gravity()
self.assertEquals(bullet.btVector3(0, -9.8, 0),
gravity)
self.world.gravity = bullet.btVector3(0, 0, 0)
self.assertEquals(self.world.get_gravity(),
bullet.btVector3(0, 0, 0))
def tearDown(self):
del self.world
del self.dispatcher
del self.broadphase
del self.collision_config
del self.cinfo
del self.solver
|
Add tests for implemented methods
|
Add tests for implemented methods
|
Python
|
mit
|
Klumhru/boost-python-bullet,Klumhru/boost-python-bullet,Klumhru/boost-python-bullet
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_collision.test_worlds
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import bullet
class DiscreteDynamicsWorldTestCase(unittest.TestCase):
def setUp(self):
pass
def test_ctor(self):
pass
def tearDown(self):
pass
Add tests for implemented methods
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_collision.test_worlds
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import bullet
class DiscreteDynamicsWorldTestCase(unittest.TestCase):
def setUp(self):
self.solver = bullet.btSequentialImpulseConstraintSolver()
self.cinfo = bullet.btDefaultCollisionConstructionInfo()
self.collision_config = \
bullet.btDefaultCollisionConfiguration(self.cinfo)
self.broadphase = bullet.btDbvtBroadphase()
self.dispatcher = bullet.btCollisionDispatcher(self.collision_config)
self.world = bullet.btDiscreteDynamicsWorld(
self.dispatcher,
self.broadphase,
self.solver,
self.collision_config
)
self.time_step = 1.0/60
def test_ctor(self):
pass
def test_step(self):
for i in range(120):
self.world.step_simulation(self.time_step)
def test_sync_states(self):
for i in range(120):
self.world.step_simulation(self.time_step)
self.world.synchronize_motion_states()
def test_gravity(self):
self.world.set_gravity(bullet.btVector3(0, -9.8, 0))
gravity = self.world.get_gravity()
self.assertEquals(bullet.btVector3(0, -9.8, 0),
gravity)
self.world.gravity = bullet.btVector3(0, 0, 0)
self.assertEquals(self.world.get_gravity(),
bullet.btVector3(0, 0, 0))
def tearDown(self):
del self.world
del self.dispatcher
del self.broadphase
del self.collision_config
del self.cinfo
del self.solver
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_collision.test_worlds
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import bullet
class DiscreteDynamicsWorldTestCase(unittest.TestCase):
def setUp(self):
pass
def test_ctor(self):
pass
def tearDown(self):
pass
<commit_msg>Add tests for implemented methods<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_collision.test_worlds
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import bullet
class DiscreteDynamicsWorldTestCase(unittest.TestCase):
def setUp(self):
self.solver = bullet.btSequentialImpulseConstraintSolver()
self.cinfo = bullet.btDefaultCollisionConstructionInfo()
self.collision_config = \
bullet.btDefaultCollisionConfiguration(self.cinfo)
self.broadphase = bullet.btDbvtBroadphase()
self.dispatcher = bullet.btCollisionDispatcher(self.collision_config)
self.world = bullet.btDiscreteDynamicsWorld(
self.dispatcher,
self.broadphase,
self.solver,
self.collision_config
)
self.time_step = 1.0/60
def test_ctor(self):
pass
def test_step(self):
for i in range(120):
self.world.step_simulation(self.time_step)
def test_sync_states(self):
for i in range(120):
self.world.step_simulation(self.time_step)
self.world.synchronize_motion_states()
def test_gravity(self):
self.world.set_gravity(bullet.btVector3(0, -9.8, 0))
gravity = self.world.get_gravity()
self.assertEquals(bullet.btVector3(0, -9.8, 0),
gravity)
self.world.gravity = bullet.btVector3(0, 0, 0)
self.assertEquals(self.world.get_gravity(),
bullet.btVector3(0, 0, 0))
def tearDown(self):
del self.world
del self.dispatcher
del self.broadphase
del self.collision_config
del self.cinfo
del self.solver
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_collision.test_worlds
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import bullet
class DiscreteDynamicsWorldTestCase(unittest.TestCase):
def setUp(self):
pass
def test_ctor(self):
pass
def tearDown(self):
pass
Add tests for implemented methods
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_collision.test_worlds
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import bullet
class DiscreteDynamicsWorldTestCase(unittest.TestCase):
def setUp(self):
self.solver = bullet.btSequentialImpulseConstraintSolver()
self.cinfo = bullet.btDefaultCollisionConstructionInfo()
self.collision_config = \
bullet.btDefaultCollisionConfiguration(self.cinfo)
self.broadphase = bullet.btDbvtBroadphase()
self.dispatcher = bullet.btCollisionDispatcher(self.collision_config)
self.world = bullet.btDiscreteDynamicsWorld(
self.dispatcher,
self.broadphase,
self.solver,
self.collision_config
)
self.time_step = 1.0/60
def test_ctor(self):
pass
def test_step(self):
for i in range(120):
self.world.step_simulation(self.time_step)
def test_sync_states(self):
for i in range(120):
self.world.step_simulation(self.time_step)
self.world.synchronize_motion_states()
def test_gravity(self):
self.world.set_gravity(bullet.btVector3(0, -9.8, 0))
gravity = self.world.get_gravity()
self.assertEquals(bullet.btVector3(0, -9.8, 0),
gravity)
self.world.gravity = bullet.btVector3(0, 0, 0)
self.assertEquals(self.world.get_gravity(),
bullet.btVector3(0, 0, 0))
def tearDown(self):
del self.world
del self.dispatcher
del self.broadphase
del self.collision_config
del self.cinfo
del self.solver
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_collision.test_worlds
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import bullet
class DiscreteDynamicsWorldTestCase(unittest.TestCase):
def setUp(self):
pass
def test_ctor(self):
pass
def tearDown(self):
pass
<commit_msg>Add tests for implemented methods<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_collision.test_worlds
"""
from __future__ import unicode_literals, print_function, absolute_import
import unittest
import bullet
class DiscreteDynamicsWorldTestCase(unittest.TestCase):
def setUp(self):
self.solver = bullet.btSequentialImpulseConstraintSolver()
self.cinfo = bullet.btDefaultCollisionConstructionInfo()
self.collision_config = \
bullet.btDefaultCollisionConfiguration(self.cinfo)
self.broadphase = bullet.btDbvtBroadphase()
self.dispatcher = bullet.btCollisionDispatcher(self.collision_config)
self.world = bullet.btDiscreteDynamicsWorld(
self.dispatcher,
self.broadphase,
self.solver,
self.collision_config
)
self.time_step = 1.0/60
def test_ctor(self):
pass
def test_step(self):
for i in range(120):
self.world.step_simulation(self.time_step)
def test_sync_states(self):
for i in range(120):
self.world.step_simulation(self.time_step)
self.world.synchronize_motion_states()
def test_gravity(self):
self.world.set_gravity(bullet.btVector3(0, -9.8, 0))
gravity = self.world.get_gravity()
self.assertEquals(bullet.btVector3(0, -9.8, 0),
gravity)
self.world.gravity = bullet.btVector3(0, 0, 0)
self.assertEquals(self.world.get_gravity(),
bullet.btVector3(0, 0, 0))
def tearDown(self):
del self.world
del self.dispatcher
del self.broadphase
del self.collision_config
del self.cinfo
del self.solver
|
d82e1ab6b94df47b9b9693cf09162f1f579e9eee
|
django_countries/widgets.py
|
django_countries/widgets.py
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = %s.replace('{code}', this.value.toLowerCase() || '__').replace('{code_upper}', this.value.toUpperCase() || '__');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER % settings.COUNTRIES_FLAG_URL
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
Use the original COUNTRIES_FLAG_URL string for the JS replace.
|
Use the original COUNTRIES_FLAG_URL string for the JS replace.
|
Python
|
mit
|
velfimov/django-countries,pimlie/django-countries,schinckel/django-countries,fladi/django-countries,SmileyChris/django-countries,jrfernandes/django-countries,rahimnathwani/django-countries
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
Use the original COUNTRIES_FLAG_URL string for the JS replace.
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = %s.replace('{code}', this.value.toLowerCase() || '__').replace('{code_upper}', this.value.toUpperCase() || '__');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER % settings.COUNTRIES_FLAG_URL
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
<commit_before>from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
<commit_msg>Use the original COUNTRIES_FLAG_URL string for the JS replace.<commit_after>
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = %s.replace('{code}', this.value.toLowerCase() || '__').replace('{code_upper}', this.value.toUpperCase() || '__');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER % settings.COUNTRIES_FLAG_URL
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
Use the original COUNTRIES_FLAG_URL string for the JS replace.
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = %s.replace('{code}', this.value.toLowerCase() || '__').replace('{code_upper}', this.value.toUpperCase() || '__');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER % settings.COUNTRIES_FLAG_URL
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
<commit_before>from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
<commit_msg>Use the original COUNTRIES_FLAG_URL string for the JS replace.<commit_after>from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = %s.replace('{code}', this.value.toLowerCase() || '__').replace('{code_upper}', this.value.toUpperCase() || '__');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER % settings.COUNTRIES_FLAG_URL
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
9d961fb5f50882de687278996365233dc0794123
|
scripts/get_images.py
|
scripts/get_images.py
|
#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
import os
def save_file(url, filename):
if (os.path.exists(filename)):
print(filename + ' already exists locally')
pass
urllib.request.urlretrieve(url, filename)
def get_filename(url):
relpath = urlparse(url).path
return os.path.split(relpath)[-1]
url = 'http://www.data.act.gov.au/resource/j746-krni.json'
data = requests.get(url).json()
filedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../data/images'))
if not os.path.exists(filedir):
os.makedirs(filedir)
download_list = []
for item in data:
title = item['title']
page = item['url_1']['url']
pic = item['url_2']['url']
filename = get_filename(pic)
download_list.append({'title': title, 'page': page, 'file': filename})
for item in download_list:
retry = requests.get(item['page']).text
data = BeautifulSoup(retry)
imageurl = data.find_all('div', {'id': 'artinfo'})[0].find_all('img')[0]['src']
save_file(imageurl, os.path.join(filedir, item['file']))
|
#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
import os
def save_file(url, filename):
if (os.path.exists(filename)):
print(filename + ' already exists locally')
pass
urllib.request.urlretrieve(url, filename)
def get_filename(url):
relpath = urlparse(url).path
return os.path.split(relpath)[-1]
url = 'http://www.data.act.gov.au/resource/j746-krni.json'
data = requests.get(url).json()
filedir = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../govhack2014/static/art_images'))
if not os.path.exists(filedir):
os.makedirs(filedir)
download_list = []
for item in data:
title = item['title']
page = item['url_1']['url']
pic = item['url_2']['url']
filename = get_filename(pic)
download_list.append({'title': title, 'page': page, 'file': filename})
for item in download_list:
retry = requests.get(item['page']).text
data = BeautifulSoup(retry)
imageurl = data.find_all('div', {'id': 'artinfo'})[0].find_all('img')[0]['src']
save_file(imageurl, os.path.join(filedir, item['file']))
|
Fix image download path, thanks @cmrn
|
Fix image download path, thanks @cmrn
|
Python
|
mit
|
makehackvoid/govhack2014,makehackvoid/govhack2014
|
#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
import os
def save_file(url, filename):
if (os.path.exists(filename)):
print(filename + ' already exists locally')
pass
urllib.request.urlretrieve(url, filename)
def get_filename(url):
relpath = urlparse(url).path
return os.path.split(relpath)[-1]
url = 'http://www.data.act.gov.au/resource/j746-krni.json'
data = requests.get(url).json()
filedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../data/images'))
if not os.path.exists(filedir):
os.makedirs(filedir)
download_list = []
for item in data:
title = item['title']
page = item['url_1']['url']
pic = item['url_2']['url']
filename = get_filename(pic)
download_list.append({'title': title, 'page': page, 'file': filename})
for item in download_list:
retry = requests.get(item['page']).text
data = BeautifulSoup(retry)
imageurl = data.find_all('div', {'id': 'artinfo'})[0].find_all('img')[0]['src']
save_file(imageurl, os.path.join(filedir, item['file']))
Fix image download path, thanks @cmrn
|
#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
import os
def save_file(url, filename):
if (os.path.exists(filename)):
print(filename + ' already exists locally')
pass
urllib.request.urlretrieve(url, filename)
def get_filename(url):
relpath = urlparse(url).path
return os.path.split(relpath)[-1]
url = 'http://www.data.act.gov.au/resource/j746-krni.json'
data = requests.get(url).json()
filedir = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../govhack2014/static/art_images'))
if not os.path.exists(filedir):
os.makedirs(filedir)
download_list = []
for item in data:
title = item['title']
page = item['url_1']['url']
pic = item['url_2']['url']
filename = get_filename(pic)
download_list.append({'title': title, 'page': page, 'file': filename})
for item in download_list:
retry = requests.get(item['page']).text
data = BeautifulSoup(retry)
imageurl = data.find_all('div', {'id': 'artinfo'})[0].find_all('img')[0]['src']
save_file(imageurl, os.path.join(filedir, item['file']))
|
<commit_before>#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
import os
def save_file(url, filename):
if (os.path.exists(filename)):
print(filename + ' already exists locally')
pass
urllib.request.urlretrieve(url, filename)
def get_filename(url):
relpath = urlparse(url).path
return os.path.split(relpath)[-1]
url = 'http://www.data.act.gov.au/resource/j746-krni.json'
data = requests.get(url).json()
filedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../data/images'))
if not os.path.exists(filedir):
os.makedirs(filedir)
download_list = []
for item in data:
title = item['title']
page = item['url_1']['url']
pic = item['url_2']['url']
filename = get_filename(pic)
download_list.append({'title': title, 'page': page, 'file': filename})
for item in download_list:
retry = requests.get(item['page']).text
data = BeautifulSoup(retry)
imageurl = data.find_all('div', {'id': 'artinfo'})[0].find_all('img')[0]['src']
save_file(imageurl, os.path.join(filedir, item['file']))
<commit_msg>Fix image download path, thanks @cmrn<commit_after>
|
#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
import os
def save_file(url, filename):
if (os.path.exists(filename)):
print(filename + ' already exists locally')
pass
urllib.request.urlretrieve(url, filename)
def get_filename(url):
relpath = urlparse(url).path
return os.path.split(relpath)[-1]
url = 'http://www.data.act.gov.au/resource/j746-krni.json'
data = requests.get(url).json()
filedir = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../govhack2014/static/art_images'))
if not os.path.exists(filedir):
os.makedirs(filedir)
download_list = []
for item in data:
title = item['title']
page = item['url_1']['url']
pic = item['url_2']['url']
filename = get_filename(pic)
download_list.append({'title': title, 'page': page, 'file': filename})
for item in download_list:
retry = requests.get(item['page']).text
data = BeautifulSoup(retry)
imageurl = data.find_all('div', {'id': 'artinfo'})[0].find_all('img')[0]['src']
save_file(imageurl, os.path.join(filedir, item['file']))
|
#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
import os
def save_file(url, filename):
if (os.path.exists(filename)):
print(filename + ' already exists locally')
pass
urllib.request.urlretrieve(url, filename)
def get_filename(url):
relpath = urlparse(url).path
return os.path.split(relpath)[-1]
url = 'http://www.data.act.gov.au/resource/j746-krni.json'
data = requests.get(url).json()
filedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../data/images'))
if not os.path.exists(filedir):
os.makedirs(filedir)
download_list = []
for item in data:
title = item['title']
page = item['url_1']['url']
pic = item['url_2']['url']
filename = get_filename(pic)
download_list.append({'title': title, 'page': page, 'file': filename})
for item in download_list:
retry = requests.get(item['page']).text
data = BeautifulSoup(retry)
imageurl = data.find_all('div', {'id': 'artinfo'})[0].find_all('img')[0]['src']
save_file(imageurl, os.path.join(filedir, item['file']))
Fix image download path, thanks @cmrn
#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
import os
def save_file(url, filename):
if (os.path.exists(filename)):
print(filename + ' already exists locally')
pass
urllib.request.urlretrieve(url, filename)
def get_filename(url):
relpath = urlparse(url).path
return os.path.split(relpath)[-1]
url = 'http://www.data.act.gov.au/resource/j746-krni.json'
data = requests.get(url).json()
filedir = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../govhack2014/static/art_images'))
if not os.path.exists(filedir):
os.makedirs(filedir)
download_list = []
for item in data:
title = item['title']
page = item['url_1']['url']
pic = item['url_2']['url']
filename = get_filename(pic)
download_list.append({'title': title, 'page': page, 'file': filename})
for item in download_list:
retry = requests.get(item['page']).text
data = BeautifulSoup(retry)
imageurl = data.find_all('div', {'id': 'artinfo'})[0].find_all('img')[0]['src']
save_file(imageurl, os.path.join(filedir, item['file']))
|
<commit_before>#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
import os
def save_file(url, filename):
if (os.path.exists(filename)):
print(filename + ' already exists locally')
pass
urllib.request.urlretrieve(url, filename)
def get_filename(url):
relpath = urlparse(url).path
return os.path.split(relpath)[-1]
url = 'http://www.data.act.gov.au/resource/j746-krni.json'
data = requests.get(url).json()
filedir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../data/images'))
if not os.path.exists(filedir):
os.makedirs(filedir)
download_list = []
for item in data:
title = item['title']
page = item['url_1']['url']
pic = item['url_2']['url']
filename = get_filename(pic)
download_list.append({'title': title, 'page': page, 'file': filename})
for item in download_list:
retry = requests.get(item['page']).text
data = BeautifulSoup(retry)
imageurl = data.find_all('div', {'id': 'artinfo'})[0].find_all('img')[0]['src']
save_file(imageurl, os.path.join(filedir, item['file']))
<commit_msg>Fix image download path, thanks @cmrn<commit_after>#!/usr/bin/env python3
import requests
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
import os
def save_file(url, filename):
if (os.path.exists(filename)):
print(filename + ' already exists locally')
pass
urllib.request.urlretrieve(url, filename)
def get_filename(url):
relpath = urlparse(url).path
return os.path.split(relpath)[-1]
url = 'http://www.data.act.gov.au/resource/j746-krni.json'
data = requests.get(url).json()
filedir = os.path.abspath(os.path.join(os.path.dirname(__file__),
'../govhack2014/static/art_images'))
if not os.path.exists(filedir):
os.makedirs(filedir)
download_list = []
for item in data:
title = item['title']
page = item['url_1']['url']
pic = item['url_2']['url']
filename = get_filename(pic)
download_list.append({'title': title, 'page': page, 'file': filename})
for item in download_list:
retry = requests.get(item['page']).text
data = BeautifulSoup(retry)
imageurl = data.find_all('div', {'id': 'artinfo'})[0].find_all('img')[0]['src']
save_file(imageurl, os.path.join(filedir, item['file']))
|
6461a380e6def18ce359139b416e2e1b93e60d57
|
searchapi/__init__.py
|
searchapi/__init__.py
|
import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.info("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.debug("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
Set config logging in init to debug
|
Set config logging in init to debug
|
Python
|
mit
|
LandRegistry/search-api-alpha,LandRegistry/search-api-alpha
|
import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.info("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
Set config logging in init to debug
|
import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.debug("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
<commit_before>import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.info("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
<commit_msg>Set config logging in init to debug<commit_after>
|
import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.debug("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.info("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
Set config logging in init to debug
import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.debug("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
<commit_before>import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.info("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
<commit_msg>Set config logging in init to debug<commit_after>import os, logging
from flask import Flask
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
app.logger.debug("\nConfiguration\n%s\n" % app.config)
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
|
94d6ac50b4ce48aec51d5f32989d8d4aea938868
|
{{cookiecutter.repo_name}}/setup.py
|
{{cookiecutter.repo_name}}/setup.py
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
entry_points={
'console_scripts': [
'{{cookiecutter.repo_name}}={{cookiecutter.repo_name}}.main:main'
]
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
Set up console script for main
|
Set up console script for main
|
Python
|
mit
|
hackebrot/cookiedozer,hackebrot/cookiedozer
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
Set up console script for main
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
entry_points={
'console_scripts': [
'{{cookiecutter.repo_name}}={{cookiecutter.repo_name}}.main:main'
]
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
<commit_msg>Set up console script for main<commit_after>
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
entry_points={
'console_scripts': [
'{{cookiecutter.repo_name}}={{cookiecutter.repo_name}}.main:main'
]
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
Set up console script for main
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
entry_points={
'console_scripts': [
'{{cookiecutter.repo_name}}={{cookiecutter.repo_name}}.main:main'
]
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
<commit_msg>Set up console script for main<commit_after>import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "{{cookiecutter.repo_name}}",
version = "{{cookiecutter.version}}",
author = "{{cookiecutter.full_name}}",
author_email = "{{cookiecutter.email}}",
description = "{{cookiecutter.short_description}}",
license = "MIT",
keywords=(
"Python, cookiecutter, kivy, buildozer, pytest, projects, project "
"templates, example, documentation, tutorial, setup.py, package, "
"android, touch, mobile, NUI"
),
url = "https://github.com/{{cookiecutter.github_username}}/{{cookiecutter.repo_name}}",
packages=find_packages(),
long_description=read('README.rst'),
install_requires = ['kivy>=1.8.0'],
package_data={
'{{cookiecutter.repo_name}}': ['*.kv*']
},
entry_points={
'console_scripts': [
'{{cookiecutter.repo_name}}={{cookiecutter.repo_name}}.main:main'
]
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: X11 Applications',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Artistic Software',
'Topic :: Multimedia :: Graphics :: Presentation',
'Topic :: Software Development :: User Interfaces',
],
)
|
165e05e9641841b696ea99484e02bfd5aa0d02c1
|
controllers/base_controller.py
|
controllers/base_controller.py
|
from flask import g
from flask.views import View
class BaseController(View):
"""Base Controller
The base controller of this module, it shows how to make
'class based controller' with 'Flask Pluggable view'.
You can make controller inherited from BaseController
not from Flask.View
class ExampleController(BaseController):
# your methods
"""
methods = []
decorators = []
def is_form_valid(self, form):
"""Validate form
It wraps the flask-wtf `validate_on_submit`, it needs flask-wtf
form object to be passed in.
"""
return form.validate_on_submit()
def get_site_data(self):
"""Placeholder of site data
It gets site data from `site_setting` decorator.
"""
return g.school
def dispatch_request(self):
pass
|
from flask import g
from flask.views import View
class BaseController(View):
"""Base Controller
The base controller of this module, it shows how to make
'class based controller' with 'Flask Pluggable view'.
You can make controller inherited from BaseController
not from Flask.View
class ExampleController(BaseController):
# your methods
"""
methods = []
decorators = []
def show_admin_nav(self):
"""Show admin navigation flag
This is just a boolean flag to help admin menu in top navigation bar
should show or not.
TODO: Make better implementation that show/hide admin menu
"""
return True
def is_form_valid(self, form):
"""Validate form
It wraps the flask-wtf `validate_on_submit`, it needs flask-wtf
form object to be passed in.
"""
return form.validate_on_submit()
def get_site_data(self):
"""Placeholder of site data
It gets site data from `site_setting` decorator.
"""
return g.school
def dispatch_request(self):
pass
|
Move admin flag as separate method
|
Move admin flag as separate method
|
Python
|
mit
|
openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
|
from flask import g
from flask.views import View
class BaseController(View):
"""Base Controller
The base controller of this module, it shows how to make
'class based controller' with 'Flask Pluggable view'.
You can make controller inherited from BaseController
not from Flask.View
class ExampleController(BaseController):
# your methods
"""
methods = []
decorators = []
def is_form_valid(self, form):
"""Validate form
It wraps the flask-wtf `validate_on_submit`, it needs flask-wtf
form object to be passed in.
"""
return form.validate_on_submit()
def get_site_data(self):
"""Placeholder of site data
It gets site data from `site_setting` decorator.
"""
return g.school
def dispatch_request(self):
pass
Move admin flag as separate method
|
from flask import g
from flask.views import View
class BaseController(View):
"""Base Controller
The base controller of this module, it shows how to make
'class based controller' with 'Flask Pluggable view'.
You can make controller inherited from BaseController
not from Flask.View
class ExampleController(BaseController):
# your methods
"""
methods = []
decorators = []
def show_admin_nav(self):
"""Show admin navigation flag
This is just a boolean flag to help admin menu in top navigation bar
should show or not.
TODO: Make better implementation that show/hide admin menu
"""
return True
def is_form_valid(self, form):
"""Validate form
It wraps the flask-wtf `validate_on_submit`, it needs flask-wtf
form object to be passed in.
"""
return form.validate_on_submit()
def get_site_data(self):
"""Placeholder of site data
It gets site data from `site_setting` decorator.
"""
return g.school
def dispatch_request(self):
pass
|
<commit_before>from flask import g
from flask.views import View
class BaseController(View):
"""Base Controller
The base controller of this module, it shows how to make
'class based controller' with 'Flask Pluggable view'.
You can make controller inherited from BaseController
not from Flask.View
class ExampleController(BaseController):
# your methods
"""
methods = []
decorators = []
def is_form_valid(self, form):
"""Validate form
It wraps the flask-wtf `validate_on_submit`, it needs flask-wtf
form object to be passed in.
"""
return form.validate_on_submit()
def get_site_data(self):
"""Placeholder of site data
It gets site data from `site_setting` decorator.
"""
return g.school
def dispatch_request(self):
pass
<commit_msg>Move admin flag as separate method<commit_after>
|
from flask import g
from flask.views import View
class BaseController(View):
"""Base Controller
The base controller of this module, it shows how to make
'class based controller' with 'Flask Pluggable view'.
You can make controller inherited from BaseController
not from Flask.View
class ExampleController(BaseController):
# your methods
"""
methods = []
decorators = []
def show_admin_nav(self):
"""Show admin navigation flag
This is just a boolean flag to help admin menu in top navigation bar
should show or not.
TODO: Make better implementation that show/hide admin menu
"""
return True
def is_form_valid(self, form):
"""Validate form
It wraps the flask-wtf `validate_on_submit`, it needs flask-wtf
form object to be passed in.
"""
return form.validate_on_submit()
def get_site_data(self):
"""Placeholder of site data
It gets site data from `site_setting` decorator.
"""
return g.school
def dispatch_request(self):
pass
|
from flask import g
from flask.views import View
class BaseController(View):
"""Base Controller
The base controller of this module, it shows how to make
'class based controller' with 'Flask Pluggable view'.
You can make controller inherited from BaseController
not from Flask.View
class ExampleController(BaseController):
# your methods
"""
methods = []
decorators = []
def is_form_valid(self, form):
"""Validate form
It wraps the flask-wtf `validate_on_submit`, it needs flask-wtf
form object to be passed in.
"""
return form.validate_on_submit()
def get_site_data(self):
"""Placeholder of site data
It gets site data from `site_setting` decorator.
"""
return g.school
def dispatch_request(self):
pass
Move admin flag as separate method
from flask import g
from flask.views import View
class BaseController(View):
"""Base Controller
The base controller of this module, it shows how to make
'class based controller' with 'Flask Pluggable view'.
You can make controller inherited from BaseController
not from Flask.View
class ExampleController(BaseController):
# your methods
"""
methods = []
decorators = []
def show_admin_nav(self):
"""Show admin navigation flag
This is just a boolean flag to help admin menu in top navigation bar
should show or not.
TODO: Make better implementation that show/hide admin menu
"""
return True
def is_form_valid(self, form):
"""Validate form
It wraps the flask-wtf `validate_on_submit`, it needs flask-wtf
form object to be passed in.
"""
return form.validate_on_submit()
def get_site_data(self):
"""Placeholder of site data
It gets site data from `site_setting` decorator.
"""
return g.school
def dispatch_request(self):
pass
|
<commit_before>from flask import g
from flask.views import View
class BaseController(View):
"""Base Controller
The base controller of this module, it shows how to make
'class based controller' with 'Flask Pluggable view'.
You can make controller inherited from BaseController
not from Flask.View
class ExampleController(BaseController):
# your methods
"""
methods = []
decorators = []
def is_form_valid(self, form):
"""Validate form
It wraps the flask-wtf `validate_on_submit`, it needs flask-wtf
form object to be passed in.
"""
return form.validate_on_submit()
def get_site_data(self):
"""Placeholder of site data
It gets site data from `site_setting` decorator.
"""
return g.school
def dispatch_request(self):
pass
<commit_msg>Move admin flag as separate method<commit_after>from flask import g
from flask.views import View
class BaseController(View):
"""Base Controller
The base controller of this module, it shows how to make
'class based controller' with 'Flask Pluggable view'.
You can make controller inherited from BaseController
not from Flask.View
class ExampleController(BaseController):
# your methods
"""
methods = []
decorators = []
def show_admin_nav(self):
"""Show admin navigation flag
This is just a boolean flag to help admin menu in top navigation bar
should show or not.
TODO: Make better implementation that show/hide admin menu
"""
return True
def is_form_valid(self, form):
"""Validate form
It wraps the flask-wtf `validate_on_submit`, it needs flask-wtf
form object to be passed in.
"""
return form.validate_on_submit()
def get_site_data(self):
"""Placeholder of site data
It gets site data from `site_setting` decorator.
"""
return g.school
def dispatch_request(self):
pass
|
0b64ca640cff92a4e01d68b91a6f3147cc22ebd4
|
myuw_mobile/logger/logresp.py
|
myuw_mobile/logger/logresp.py
|
from myuw_mobile.dao.gws import Member
from myuw_mobile.logger.logback import log_time
def log_response_time(logger, message, timer):
log_time(logger, message, timer)
def log_success_response(logger, timer):
log_time(logger,
get_identity() + 'fulfilled',
timer)
def log_data_not_found_response(logger, timer):
log_time(logger,
get_identity() + ' data not found',
timer)
def log_invalid_netid_response(logger, timer):
log_time(logger, 'invalid netid, abort', timer)
def log_invalid_regid_response(logger, timer):
log_time(logger, 'invalid regid, abort', timer)
def get_identity():
"""
Return "(<affiliations>, <campus codes>)"
"""
res = "("
member = Member()
if member.is_grad_student():
res += ' Grad'
if member.is_undergrad_student():
res += ' Undergrad'
if member.is_pce_student():
res += ' Pce'
if member.is_student_employee():
res += ' StudEmployee'
res += ','
if member.is_seattle_student():
res += ' Seattle'
if member.is_bothell_student():
res += ' Bothell'
if member.is_tacoma_student():
res += ' Tacoma'
res += ') '
return res
|
from myuw_mobile.dao.gws import Member
from myuw_mobile.dao.sws import Schedule
from myuw_mobile.logger.logback import log_time
def log_response_time(logger, message, timer):
log_time(logger, message, timer)
def log_success_response(logger, timer):
log_time(logger,
get_identity() + 'fulfilled',
timer)
def log_data_not_found_response(logger, timer):
log_time(logger,
get_identity() + ' data not found',
timer)
def log_invalid_netid_response(logger, timer):
log_time(logger, 'invalid netid, abort', timer)
def log_invalid_regid_response(logger, timer):
log_time(logger, 'invalid regid, abort', timer)
def get_identity():
"""
Return "(<affiliations>, <campus codes>)"
"""
res = "("
member = Member()
campuses = Schedule().get_cur_quarter_campuses()
if member.is_grad_student():
res += ' Grad'
if member.is_undergrad_student():
res += ' Undergrad'
if member.is_pce_student():
res += ' Pce'
if member.is_student_employee():
res += ' StudEmployee'
res += ','
if campuses['seattle']:
res += ' Seattle'
if campuses['bothell']:
res += ' Bothell'
if campuses['tacoma']:
res += ' Tacoma'
res += ') '
return res
|
Switch to use Schedule for identifying campus.
|
Switch to use Schedule for identifying campus.
|
Python
|
apache-2.0
|
fanglinfang/myuw,uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw,fanglinfang/myuw,fanglinfang/myuw
|
from myuw_mobile.dao.gws import Member
from myuw_mobile.logger.logback import log_time
def log_response_time(logger, message, timer):
log_time(logger, message, timer)
def log_success_response(logger, timer):
log_time(logger,
get_identity() + 'fulfilled',
timer)
def log_data_not_found_response(logger, timer):
log_time(logger,
get_identity() + ' data not found',
timer)
def log_invalid_netid_response(logger, timer):
log_time(logger, 'invalid netid, abort', timer)
def log_invalid_regid_response(logger, timer):
log_time(logger, 'invalid regid, abort', timer)
def get_identity():
"""
Return "(<affiliations>, <campus codes>)"
"""
res = "("
member = Member()
if member.is_grad_student():
res += ' Grad'
if member.is_undergrad_student():
res += ' Undergrad'
if member.is_pce_student():
res += ' Pce'
if member.is_student_employee():
res += ' StudEmployee'
res += ','
if member.is_seattle_student():
res += ' Seattle'
if member.is_bothell_student():
res += ' Bothell'
if member.is_tacoma_student():
res += ' Tacoma'
res += ') '
return res
Switch to use Schedule for identifying campus.
|
from myuw_mobile.dao.gws import Member
from myuw_mobile.dao.sws import Schedule
from myuw_mobile.logger.logback import log_time
def log_response_time(logger, message, timer):
log_time(logger, message, timer)
def log_success_response(logger, timer):
log_time(logger,
get_identity() + 'fulfilled',
timer)
def log_data_not_found_response(logger, timer):
log_time(logger,
get_identity() + ' data not found',
timer)
def log_invalid_netid_response(logger, timer):
log_time(logger, 'invalid netid, abort', timer)
def log_invalid_regid_response(logger, timer):
log_time(logger, 'invalid regid, abort', timer)
def get_identity():
"""
Return "(<affiliations>, <campus codes>)"
"""
res = "("
member = Member()
campuses = Schedule().get_cur_quarter_campuses()
if member.is_grad_student():
res += ' Grad'
if member.is_undergrad_student():
res += ' Undergrad'
if member.is_pce_student():
res += ' Pce'
if member.is_student_employee():
res += ' StudEmployee'
res += ','
if campuses['seattle']:
res += ' Seattle'
if campuses['bothell']:
res += ' Bothell'
if campuses['tacoma']:
res += ' Tacoma'
res += ') '
return res
|
<commit_before>from myuw_mobile.dao.gws import Member
from myuw_mobile.logger.logback import log_time
def log_response_time(logger, message, timer):
log_time(logger, message, timer)
def log_success_response(logger, timer):
log_time(logger,
get_identity() + 'fulfilled',
timer)
def log_data_not_found_response(logger, timer):
log_time(logger,
get_identity() + ' data not found',
timer)
def log_invalid_netid_response(logger, timer):
log_time(logger, 'invalid netid, abort', timer)
def log_invalid_regid_response(logger, timer):
log_time(logger, 'invalid regid, abort', timer)
def get_identity():
"""
Return "(<affiliations>, <campus codes>)"
"""
res = "("
member = Member()
if member.is_grad_student():
res += ' Grad'
if member.is_undergrad_student():
res += ' Undergrad'
if member.is_pce_student():
res += ' Pce'
if member.is_student_employee():
res += ' StudEmployee'
res += ','
if member.is_seattle_student():
res += ' Seattle'
if member.is_bothell_student():
res += ' Bothell'
if member.is_tacoma_student():
res += ' Tacoma'
res += ') '
return res
<commit_msg>Switch to use Schedule for identifying campus.<commit_after>
|
from myuw_mobile.dao.gws import Member
from myuw_mobile.dao.sws import Schedule
from myuw_mobile.logger.logback import log_time
def log_response_time(logger, message, timer):
log_time(logger, message, timer)
def log_success_response(logger, timer):
log_time(logger,
get_identity() + 'fulfilled',
timer)
def log_data_not_found_response(logger, timer):
log_time(logger,
get_identity() + ' data not found',
timer)
def log_invalid_netid_response(logger, timer):
log_time(logger, 'invalid netid, abort', timer)
def log_invalid_regid_response(logger, timer):
log_time(logger, 'invalid regid, abort', timer)
def get_identity():
"""
Return "(<affiliations>, <campus codes>)"
"""
res = "("
member = Member()
campuses = Schedule().get_cur_quarter_campuses()
if member.is_grad_student():
res += ' Grad'
if member.is_undergrad_student():
res += ' Undergrad'
if member.is_pce_student():
res += ' Pce'
if member.is_student_employee():
res += ' StudEmployee'
res += ','
if campuses['seattle']:
res += ' Seattle'
if campuses['bothell']:
res += ' Bothell'
if campuses['tacoma']:
res += ' Tacoma'
res += ') '
return res
|
from myuw_mobile.dao.gws import Member
from myuw_mobile.logger.logback import log_time
def log_response_time(logger, message, timer):
log_time(logger, message, timer)
def log_success_response(logger, timer):
log_time(logger,
get_identity() + 'fulfilled',
timer)
def log_data_not_found_response(logger, timer):
log_time(logger,
get_identity() + ' data not found',
timer)
def log_invalid_netid_response(logger, timer):
log_time(logger, 'invalid netid, abort', timer)
def log_invalid_regid_response(logger, timer):
log_time(logger, 'invalid regid, abort', timer)
def get_identity():
"""
Return "(<affiliations>, <campus codes>)"
"""
res = "("
member = Member()
if member.is_grad_student():
res += ' Grad'
if member.is_undergrad_student():
res += ' Undergrad'
if member.is_pce_student():
res += ' Pce'
if member.is_student_employee():
res += ' StudEmployee'
res += ','
if member.is_seattle_student():
res += ' Seattle'
if member.is_bothell_student():
res += ' Bothell'
if member.is_tacoma_student():
res += ' Tacoma'
res += ') '
return res
Switch to use Schedule for identifying campus.
from myuw_mobile.dao.gws import Member
from myuw_mobile.dao.sws import Schedule
from myuw_mobile.logger.logback import log_time
def log_response_time(logger, message, timer):
log_time(logger, message, timer)
def log_success_response(logger, timer):
log_time(logger,
get_identity() + 'fulfilled',
timer)
def log_data_not_found_response(logger, timer):
log_time(logger,
get_identity() + ' data not found',
timer)
def log_invalid_netid_response(logger, timer):
log_time(logger, 'invalid netid, abort', timer)
def log_invalid_regid_response(logger, timer):
log_time(logger, 'invalid regid, abort', timer)
def get_identity():
"""
Return "(<affiliations>, <campus codes>)"
"""
res = "("
member = Member()
campuses = Schedule().get_cur_quarter_campuses()
if member.is_grad_student():
res += ' Grad'
if member.is_undergrad_student():
res += ' Undergrad'
if member.is_pce_student():
res += ' Pce'
if member.is_student_employee():
res += ' StudEmployee'
res += ','
if campuses['seattle']:
res += ' Seattle'
if campuses['bothell']:
res += ' Bothell'
if campuses['tacoma']:
res += ' Tacoma'
res += ') '
return res
|
<commit_before>from myuw_mobile.dao.gws import Member
from myuw_mobile.logger.logback import log_time
def log_response_time(logger, message, timer):
log_time(logger, message, timer)
def log_success_response(logger, timer):
log_time(logger,
get_identity() + 'fulfilled',
timer)
def log_data_not_found_response(logger, timer):
log_time(logger,
get_identity() + ' data not found',
timer)
def log_invalid_netid_response(logger, timer):
log_time(logger, 'invalid netid, abort', timer)
def log_invalid_regid_response(logger, timer):
log_time(logger, 'invalid regid, abort', timer)
def get_identity():
"""
Return "(<affiliations>, <campus codes>)"
"""
res = "("
member = Member()
if member.is_grad_student():
res += ' Grad'
if member.is_undergrad_student():
res += ' Undergrad'
if member.is_pce_student():
res += ' Pce'
if member.is_student_employee():
res += ' StudEmployee'
res += ','
if member.is_seattle_student():
res += ' Seattle'
if member.is_bothell_student():
res += ' Bothell'
if member.is_tacoma_student():
res += ' Tacoma'
res += ') '
return res
<commit_msg>Switch to use Schedule for identifying campus.<commit_after>from myuw_mobile.dao.gws import Member
from myuw_mobile.dao.sws import Schedule
from myuw_mobile.logger.logback import log_time
def log_response_time(logger, message, timer):
log_time(logger, message, timer)
def log_success_response(logger, timer):
log_time(logger,
get_identity() + 'fulfilled',
timer)
def log_data_not_found_response(logger, timer):
log_time(logger,
get_identity() + ' data not found',
timer)
def log_invalid_netid_response(logger, timer):
log_time(logger, 'invalid netid, abort', timer)
def log_invalid_regid_response(logger, timer):
log_time(logger, 'invalid regid, abort', timer)
def get_identity():
"""
Return "(<affiliations>, <campus codes>)"
"""
res = "("
member = Member()
campuses = Schedule().get_cur_quarter_campuses()
if member.is_grad_student():
res += ' Grad'
if member.is_undergrad_student():
res += ' Undergrad'
if member.is_pce_student():
res += ' Pce'
if member.is_student_employee():
res += ' StudEmployee'
res += ','
if campuses['seattle']:
res += ' Seattle'
if campuses['bothell']:
res += ' Bothell'
if campuses['tacoma']:
res += ' Tacoma'
res += ') '
return res
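# Illustrative sketch, not part of the record above: the reworked get_identity()
# assumes Schedule().get_cur_quarter_campuses() returns a dict of booleans keyed by
# campus name. The same accumulation pattern, shown standalone and runnable:
def campus_suffix(campuses):
    res = ','
    for key, label in (('seattle', ' Seattle'),
                       ('bothell', ' Bothell'),
                       ('tacoma', ' Tacoma')):
        if campuses[key]:
            res += label
    return res

print(campus_suffix({'seattle': True, 'bothell': False, 'tacoma': False}))  # ", Seattle"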
|
5178318df905ed1a68d312adb3936e8748789b2b
|
tests/test_views.py
|
tests/test_views.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-watchman
------------
Tests for `django-watchman` views module.
"""
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def tearDown(self):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-watchman
------------
Tests for `django-watchman` views module.
"""
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def test_check_database_handles_exception(self):
response = views.check_database('foo')
self.assertFalse(response['foo']['ok'])
self.assertEqual(response['foo']['error'], "The connection foo doesn't exist")
def tearDown(self):
pass
|
Test exception handling in `check_database`
|
Test exception handling in `check_database`
|
Python
|
bsd-3-clause
|
JBKahn/django-watchman,mwarkentin/django-watchman,mwarkentin/django-watchman,ulope/django-watchman,gerlachry/django-watchman,blag/django-watchman,JBKahn/django-watchman,blag/django-watchman,gerlachry/django-watchman,ulope/django-watchman
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-watchman
------------
Tests for `django-watchman` views module.
"""
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def tearDown(self):
pass
Test exception handling in `check_database`
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-watchman
------------
Tests for `django-watchman` views module.
"""
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def test_check_database_handles_exception(self):
response = views.check_database('foo')
self.assertFalse(response['foo']['ok'])
self.assertEqual(response['foo']['error'], "The connection foo doesn't exist")
def tearDown(self):
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-watchman
------------
Tests for `django-watchman` views module.
"""
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def tearDown(self):
pass
<commit_msg>Test exception handling in `check_database`<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-watchman
------------
Tests for `django-watchman` views module.
"""
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def test_check_database_handles_exception(self):
response = views.check_database('foo')
self.assertFalse(response['foo']['ok'])
self.assertEqual(response['foo']['error'], "The connection foo doesn't exist")
def tearDown(self):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-watchman
------------
Tests for `django-watchman` views module.
"""
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def tearDown(self):
pass
Test exception handling in `check_database`
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-watchman
------------
Tests for `django-watchman` views module.
"""
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def test_check_database_handles_exception(self):
response = views.check_database('foo')
self.assertFalse(response['foo']['ok'])
self.assertEqual(response['foo']['error'], "The connection foo doesn't exist")
def tearDown(self):
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-watchman
------------
Tests for `django-watchman` views module.
"""
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def tearDown(self):
pass
<commit_msg>Test exception handling in `check_database`<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-watchman
------------
Tests for `django-watchman` views module.
"""
import json
import unittest
from mock import patch
from watchman import views
class TestWatchman(unittest.TestCase):
def setUp(self):
pass
@patch('watchman.views.check_databases')
def test_response_content_type_json(self, patched_check_databases):
patched_check_databases.return_value = []
response = views.status('')
self.assertEqual(response['Content-Type'], 'application/json')
@patch('watchman.views.check_databases')
def test_response_contains_expected_checks(self, patched_check_databases):
expected_checks = ['databases']
patched_check_databases.return_value = []
response = views.status('')
content = json.loads(response.content)
self.assertItemsEqual(expected_checks, content.keys())
def test_check_database_handles_exception(self):
response = views.check_database('foo')
self.assertFalse(response['foo']['ok'])
self.assertEqual(response['foo']['error'], "The connection foo doesn't exist")
def tearDown(self):
pass
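# Illustrative sketch of the kind of check_database() the new test exercises; this is
# an assumption about the view, not code copied from django-watchman. Looking up an
# unknown alias on django.db.connections raises ConnectionDoesNotExist whose message
# ("The connection foo doesn't exist") is exactly what the test asserts on. Running it
# requires a configured Django settings module.
from django.db import connections

def check_database(alias):
    try:
        connections[alias].introspection.table_names()
        return {alias: {'ok': True}}
    except Exception as e:
        return {alias: {'ok': False, 'error': str(e)}}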
|
3f317fd63bb5b0b762661c112a8d27075b705d92
|
openpassword/keychain_item.py
|
openpassword/keychain_item.py
|
from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
def _decrypt(self, data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
|
from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
@staticmethod
def _decrypt(data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
|
Make KeychainItem _decode method static
|
Make KeychainItem _decode method static
|
Python
|
mit
|
openpassword/blimey,openpassword/blimey
|
from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
def _decrypt(self, data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
Make KeychainItem _decode method static
|
from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
@staticmethod
def _decrypt(data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
|
<commit_before>from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
def _decrypt(self, data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
<commit_msg>Make KeychainItem _decode method static<commit_after>
|
from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
@staticmethod
def _decrypt(data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
|
from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
def _decrypt(self, data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
Make KeychainItem _decode method static
from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
@staticmethod
def _decrypt(data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
|
<commit_before>from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
def _decrypt(self, data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
<commit_msg>Make KeychainItem _decode method static<commit_after>from Crypto.Cipher import AES
from base64 import b64decode
import json
from openpassword.pkcs_utils import strip_byte_padding
from openpassword.openssl_utils import derive_openssl_key
class KeychainItem:
def __init__(self, item):
self.encrypted = b64decode(item["encrypted"])
def decrypt(self, decryption_key):
key = self._derive_decryption_key(decryption_key)
data = self._decrypt(self.encrypted[16:], key)
data = strip_byte_padding(data)
return json.loads(data.decode('utf8'))
def _derive_decryption_key(self, decryption_key):
return derive_openssl_key(decryption_key, self.encrypted[8:16])
@staticmethod
def _decrypt(data, key_iv):
key = key_iv[0:16]
iv = key_iv[16:]
cipher = AES.new(key, AES.MODE_CBC, iv)
return cipher.decrypt(data)
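# Illustrative note: with _decrypt as a @staticmethod it takes no `self`, so it can be
# called on the class directly. A minimal check, assuming the KeychainItem class above
# is importable and PyCrypto is installed; the bytes are dummy data, not a real item.
key_iv = bytes(32)   # 16-byte AES key followed by a 16-byte IV (all zeros here)
block = bytes(16)    # one AES block of placeholder ciphertext
print(len(KeychainItem._decrypt(block, key_iv)))  # 16 bytes of (meaningless) plaintext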
|
34423fd3b4b8b4a6e257388740002343e34806ff
|
Scripts/Build.py
|
Scripts/Build.py
|
import os
import json
# Get project settings
projectData = open("project.json")
projectConfig = json.load(projectData)
buildVersion = projectConfig["version"]
buildCultures = projectConfig["cultures"]
buildConfiguration = projectConfig["configuration"]
projectData.close()
# Get system settings
systemData = open("system.json")
systemConfig = json.load(systemData)
buildPlatform = systemConfig["platform"]
inputDir = systemConfig["inputDir"]
outputDir = systemConfig["outputDir"]
engineDir = systemConfig["engineDir"]
systemData.close()
# Generate paths
inputProject = os.path.join(inputDir, "HeliumRain.uproject")
buildTool = os.path.join(engineDir, "Engine", "Build", "BatchFiles", "RunUAT.bat")
# Generate full command line
commandLine = buildTool
commandLine += " BuildCookRun -project=" + inputProject + " -nocompile -nocompileeditor -installed -nop4 -clientconfig=" + buildConfiguration
commandLine += " -cook -allmaps -stage -archive -archivedirectory=" + outputDir
commandLine += " -package -ue4exe=UE4Editor-Cmd.exe -build -clean -pak -prereqs -distribution -nodebuginfo -createreleaseversion=" + buildVersion
commandLine += " -utf8output -CookCultures=" + buildCultures
# Call
print(commandLine)
os.system(commandLine)
|
import os
import json
# Get project settings
projectData = open("project.json")
projectConfig = json.load(projectData)
buildVersion = projectConfig["version"]
buildCultures = projectConfig["cultures"]
buildConfiguration = projectConfig["configuration"]
projectData.close()
# Get system settings
systemData = open("system.json")
systemConfig = json.load(systemData)
buildPlatform = systemConfig["platform"]
inputDir = systemConfig["inputDir"]
outputDir = systemConfig["outputDir"]
engineDir = systemConfig["engineDir"]
systemData.close()
# Generate paths
inputProject = os.path.join(inputDir, "HeliumRain.uproject")
buildTool = os.path.join(engineDir, "Engine", "Build", "BatchFiles", "RunUAT.bat")
# Generate full command line
commandLine = buildTool
commandLine += " BuildCookRun -project=" + inputProject + " -nocompile -nocompileeditor -installed -nop4 -clientconfig=" + buildConfiguration
commandLine += " -cook -allmaps -stage -archive -archivedirectory=" + outputDir
commandLine += " -package -ue4exe=UE4Editor-Cmd.exe -build -pak -prereqs -distribution -nodebuginfo -createreleaseversion=" + buildVersion
commandLine += " -utf8output -CookCultures=" + buildCultures
# Call
print(commandLine)
os.system(commandLine)
|
Remove clean command from build system
|
Remove clean command from build system
|
Python
|
bsd-3-clause
|
arbonagw/HeliumRain,arbonagw/HeliumRain,arbonagw/HeliumRain,arbonagw/HeliumRain,arbonagw/HeliumRain
|
import os
import json
# Get project settings
projectData = open("project.json")
projectConfig = json.load(projectData)
buildVersion = projectConfig["version"]
buildCultures = projectConfig["cultures"]
buildConfiguration = projectConfig["configuration"]
projectData.close()
# Get system settings
systemData = open("system.json")
systemConfig = json.load(systemData)
buildPlatform = systemConfig["platform"]
inputDir = systemConfig["inputDir"]
outputDir = systemConfig["outputDir"]
engineDir = systemConfig["engineDir"]
systemData.close()
# Generate paths
inputProject = os.path.join(inputDir, "HeliumRain.uproject")
buildTool = os.path.join(engineDir, "Engine", "Build", "BatchFiles", "RunUAT.bat")
# Generate full command line
commandLine = buildTool
commandLine += " BuildCookRun -project=" + inputProject + " -nocompile -nocompileeditor -installed -nop4 -clientconfig=" + buildConfiguration
commandLine += " -cook -allmaps -stage -archive -archivedirectory=" + outputDir
commandLine += " -package -ue4exe=UE4Editor-Cmd.exe -build -clean -pak -prereqs -distribution -nodebuginfo -createreleaseversion=" + buildVersion
commandLine += " -utf8output -CookCultures=" + buildCultures
# Call
print(commandLine)
os.system(commandLine)
Remove clean command from build system
|
import os
import json
# Get project settings
projectData = open("project.json")
projectConfig = json.load(projectData)
buildVersion = projectConfig["version"]
buildCultures = projectConfig["cultures"]
buildConfiguration = projectConfig["configuration"]
projectData.close()
# Get system settings
systemData = open("system.json")
systemConfig = json.load(systemData)
buildPlatform = systemConfig["platform"]
inputDir = systemConfig["inputDir"]
outputDir = systemConfig["outputDir"]
engineDir = systemConfig["engineDir"]
systemData.close()
# Generate paths
inputProject = os.path.join(inputDir, "HeliumRain.uproject")
buildTool = os.path.join(engineDir, "Engine", "Build", "BatchFiles", "RunUAT.bat")
# Generate full command line
commandLine = buildTool
commandLine += " BuildCookRun -project=" + inputProject + " -nocompile -nocompileeditor -installed -nop4 -clientconfig=" + buildConfiguration
commandLine += " -cook -allmaps -stage -archive -archivedirectory=" + outputDir
commandLine += " -package -ue4exe=UE4Editor-Cmd.exe -build -pak -prereqs -distribution -nodebuginfo -createreleaseversion=" + buildVersion
commandLine += " -utf8output -CookCultures=" + buildCultures
# Call
print(commandLine)
os.system(commandLine)
|
<commit_before>
import os
import json
# Get project settings
projectData = open("project.json")
projectConfig = json.load(projectData)
buildVersion = projectConfig["version"]
buildCultures = projectConfig["cultures"]
buildConfiguration = projectConfig["configuration"]
projectData.close()
# Get system settings
systemData = open("system.json")
systemConfig = json.load(systemData)
buildPlatform = systemConfig["platform"]
inputDir = systemConfig["inputDir"]
outputDir = systemConfig["outputDir"]
engineDir = systemConfig["engineDir"]
systemData.close()
# Generate paths
inputProject = os.path.join(inputDir, "HeliumRain.uproject")
buildTool = os.path.join(engineDir, "Engine", "Build", "BatchFiles", "RunUAT.bat")
# Generate full command line
commandLine = buildTool
commandLine += " BuildCookRun -project=" + inputProject + " -nocompile -nocompileeditor -installed -nop4 -clientconfig=" + buildConfiguration
commandLine += " -cook -allmaps -stage -archive -archivedirectory=" + outputDir
commandLine += " -package -ue4exe=UE4Editor-Cmd.exe -build -clean -pak -prereqs -distribution -nodebuginfo -createreleaseversion=" + buildVersion
commandLine += " -utf8output -CookCultures=" + buildCultures
# Call
print(commandLine)
os.system(commandLine)
<commit_msg>Remove clean command from build system<commit_after>
|
import os
import json
# Get project settings
projectData = open("project.json")
projectConfig = json.load(projectData)
buildVersion = projectConfig["version"]
buildCultures = projectConfig["cultures"]
buildConfiguration = projectConfig["configuration"]
projectData.close()
# Get system settings
systemData = open("system.json")
systemConfig = json.load(systemData)
buildPlatform = systemConfig["platform"]
inputDir = systemConfig["inputDir"]
outputDir = systemConfig["outputDir"]
engineDir = systemConfig["engineDir"]
systemData.close()
# Generate paths
inputProject = os.path.join(inputDir, "HeliumRain.uproject")
buildTool = os.path.join(engineDir, "Engine", "Build", "BatchFiles", "RunUAT.bat")
# Generate full command line
commandLine = buildTool
commandLine += " BuildCookRun -project=" + inputProject + " -nocompile -nocompileeditor -installed -nop4 -clientconfig=" + buildConfiguration
commandLine += " -cook -allmaps -stage -archive -archivedirectory=" + outputDir
commandLine += " -package -ue4exe=UE4Editor-Cmd.exe -build -pak -prereqs -distribution -nodebuginfo -createreleaseversion=" + buildVersion
commandLine += " -utf8output -CookCultures=" + buildCultures
# Call
print(commandLine)
os.system(commandLine)
|
import os
import json
# Get project settings
projectData = open("project.json")
projectConfig = json.load(projectData)
buildVersion = projectConfig["version"]
buildCultures = projectConfig["cultures"]
buildConfiguration = projectConfig["configuration"]
projectData.close()
# Get system settings
systemData = open("system.json")
systemConfig = json.load(systemData)
buildPlatform = systemConfig["platform"]
inputDir = systemConfig["inputDir"]
outputDir = systemConfig["outputDir"]
engineDir = systemConfig["engineDir"]
systemData.close()
# Generate paths
inputProject = os.path.join(inputDir, "HeliumRain.uproject")
buildTool = os.path.join(engineDir, "Engine", "Build", "BatchFiles", "RunUAT.bat")
# Generate full command line
commandLine = buildTool
commandLine += " BuildCookRun -project=" + inputProject + " -nocompile -nocompileeditor -installed -nop4 -clientconfig=" + buildConfiguration
commandLine += " -cook -allmaps -stage -archive -archivedirectory=" + outputDir
commandLine += " -package -ue4exe=UE4Editor-Cmd.exe -build -clean -pak -prereqs -distribution -nodebuginfo -createreleaseversion=" + buildVersion
commandLine += " -utf8output -CookCultures=" + buildCultures
# Call
print(commandLine)
os.system(commandLine)
Remove clean command from build system
import os
import json
# Get project settings
projectData = open("project.json")
projectConfig = json.load(projectData)
buildVersion = projectConfig["version"]
buildCultures = projectConfig["cultures"]
buildConfiguration = projectConfig["configuration"]
projectData.close()
# Get system settings
systemData = open("system.json")
systemConfig = json.load(systemData)
buildPlatform = systemConfig["platform"]
inputDir = systemConfig["inputDir"]
outputDir = systemConfig["outputDir"]
engineDir = systemConfig["engineDir"]
systemData.close()
# Generate paths
inputProject = os.path.join(inputDir, "HeliumRain.uproject")
buildTool = os.path.join(engineDir, "Engine", "Build", "BatchFiles", "RunUAT.bat")
# Generate full command line
commandLine = buildTool
commandLine += " BuildCookRun -project=" + inputProject + " -nocompile -nocompileeditor -installed -nop4 -clientconfig=" + buildConfiguration
commandLine += " -cook -allmaps -stage -archive -archivedirectory=" + outputDir
commandLine += " -package -ue4exe=UE4Editor-Cmd.exe -build -pak -prereqs -distribution -nodebuginfo -createreleaseversion=" + buildVersion
commandLine += " -utf8output -CookCultures=" + buildCultures
# Call
print(commandLine)
os.system(commandLine)
|
<commit_before>
import os
import json
# Get project settings
projectData = open("project.json")
projectConfig = json.load(projectData)
buildVersion = projectConfig["version"]
buildCultures = projectConfig["cultures"]
buildConfiguration = projectConfig["configuration"]
projectData.close()
# Get system settings
systemData = open("system.json")
systemConfig = json.load(systemData)
buildPlatform = systemConfig["platform"]
inputDir = systemConfig["inputDir"]
outputDir = systemConfig["outputDir"]
engineDir = systemConfig["engineDir"]
systemData.close()
# Generate paths
inputProject = os.path.join(inputDir, "HeliumRain.uproject")
buildTool = os.path.join(engineDir, "Engine", "Build", "BatchFiles", "RunUAT.bat")
# Generate full command line
commandLine = buildTool
commandLine += " BuildCookRun -project=" + inputProject + " -nocompile -nocompileeditor -installed -nop4 -clientconfig=" + buildConfiguration
commandLine += " -cook -allmaps -stage -archive -archivedirectory=" + outputDir
commandLine += " -package -ue4exe=UE4Editor-Cmd.exe -build -clean -pak -prereqs -distribution -nodebuginfo -createreleaseversion=" + buildVersion
commandLine += " -utf8output -CookCultures=" + buildCultures
# Call
print(commandLine)
os.system(commandLine)
<commit_msg>Remove clean command from build system<commit_after>
import os
import json
# Get project settings
projectData = open("project.json")
projectConfig = json.load(projectData)
buildVersion = projectConfig["version"]
buildCultures = projectConfig["cultures"]
buildConfiguration = projectConfig["configuration"]
projectData.close()
# Get system settings
systemData = open("system.json")
systemConfig = json.load(systemData)
buildPlatform = systemConfig["platform"]
inputDir = systemConfig["inputDir"]
outputDir = systemConfig["outputDir"]
engineDir = systemConfig["engineDir"]
systemData.close()
# Generate paths
inputProject = os.path.join(inputDir, "HeliumRain.uproject")
buildTool = os.path.join(engineDir, "Engine", "Build", "BatchFiles", "RunUAT.bat")
# Generate full command line
commandLine = buildTool
commandLine += " BuildCookRun -project=" + inputProject + " -nocompile -nocompileeditor -installed -nop4 -clientconfig=" + buildConfiguration
commandLine += " -cook -allmaps -stage -archive -archivedirectory=" + outputDir
commandLine += " -package -ue4exe=UE4Editor-Cmd.exe -build -pak -prereqs -distribution -nodebuginfo -createreleaseversion=" + buildVersion
commandLine += " -utf8output -CookCultures=" + buildCultures
# Call
print(commandLine)
os.system(commandLine)
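# Illustrative sketch of the two JSON files the script reads; the keys mirror the
# lookups above, while the concrete values are made-up examples, not real settings.
example_project_json = {
    "version": "1.0.0",           # passed to -createreleaseversion
    "cultures": "en",             # passed to -CookCultures
    "configuration": "Shipping",  # passed to -clientconfig
}
example_system_json = {
    "platform": "Win64",                   # read by the script but not used below
    "inputDir": "C:/Projects/HeliumRain",  # must contain HeliumRain.uproject
    "outputDir": "C:/Builds/HeliumRain",   # passed to -archivedirectory
    "engineDir": "C:/UnrealEngine",        # contains Engine/Build/BatchFiles/RunUAT.bat
}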
|
182a9498fd2ef5a6cc973ea42fc99b47505ae4f4
|
app/submitter/convert_payload_0_0_2.py
|
app/submitter/convert_payload_0_0_2.py
|
def convert_answers_to_payload_0_0_2(answer_store, schema, routing_path):
"""
Convert answers into the data format below
'data': [
{
'value': 'Joe Bloggs',
'block_id': 'household-composition',
'answer_id': 'household-full-name',
'group_id': 'multiple-questions-group',
'group_instance': 0,
'answer_instance': 0
},
{
'value': 'Fred Flintstone',
'block_id': 'household-composition',
'answer_id': 'household-full-name',
'group_id': 'multiple-questions-group',
'group_instance': 0,
'answer_instance': 1
},
{
'value': 'Husband or wife',
'block_id': 'relationships',
'answer_id': 'who-is-related',
'group_id': 'household-relationships',
'group_instance': 0,
'answer_instance': 0
}
]
:param answer_store: questionnaire answers
:param routing_path: the path followed in the questionnaire
:return: data in a formatted form
"""
data = []
for location in routing_path:
answer_ids = schema.get_answer_ids_for_block(location.block_id)
answers_in_block = answer_store.filter(answer_ids, location.group_instance)
data.extend(answers_in_block)
return data
|
def convert_answers_to_payload_0_0_2(answer_store, schema, routing_path):
"""
Convert answers into the data format below
'data': [
{
'value': 'Joe Bloggs',
'answer_id': 'household-full-name',
'group_instance': 0,
'answer_instance': 0
},
{
'value': 'Fred Flintstone',
'answer_id': 'household-full-name',
'group_instance': 0,
'answer_instance': 1
},
{
'value': 'Husband or wife',
'answer_id': 'who-is-related',
'group_instance': 0,
'answer_instance': 0
}
]
:param answer_store: questionnaire answers
:param routing_path: the path followed in the questionnaire
:return: data in a formatted form
"""
data = []
for location in routing_path:
answer_ids = schema.get_answer_ids_for_block(location.block_id)
answers_in_block = answer_store.filter(answer_ids, location.group_instance)
data.extend(answers_in_block)
return data
|
Remove group_id & block_id from payload docstring
|
Remove group_id & block_id from payload docstring
|
Python
|
mit
|
ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner
|
def convert_answers_to_payload_0_0_2(answer_store, schema, routing_path):
"""
Convert answers into the data format below
'data': [
{
'value': 'Joe Bloggs',
'block_id': 'household-composition',
'answer_id': 'household-full-name',
'group_id': 'multiple-questions-group',
'group_instance': 0,
'answer_instance': 0
},
{
'value': 'Fred Flintstone',
'block_id': 'household-composition',
'answer_id': 'household-full-name',
'group_id': 'multiple-questions-group',
'group_instance': 0,
'answer_instance': 1
},
{
'value': 'Husband or wife',
'block_id': 'relationships',
'answer_id': 'who-is-related',
'group_id': 'household-relationships',
'group_instance': 0,
'answer_instance': 0
}
]
:param answer_store: questionnaire answers
:param routing_path: the path followed in the questionnaire
:return: data in a formatted form
"""
data = []
for location in routing_path:
answer_ids = schema.get_answer_ids_for_block(location.block_id)
answers_in_block = answer_store.filter(answer_ids, location.group_instance)
data.extend(answers_in_block)
return data
Remove group_id & block_id from payload docstring
|
def convert_answers_to_payload_0_0_2(answer_store, schema, routing_path):
"""
Convert answers into the data format below
'data': [
{
'value': 'Joe Bloggs',
'answer_id': 'household-full-name',
'group_instance': 0,
'answer_instance': 0
},
{
'value': 'Fred Flintstone',
'answer_id': 'household-full-name',
'group_instance': 0,
'answer_instance': 1
},
{
'value': 'Husband or wife',
'answer_id': 'who-is-related',
'group_instance': 0,
'answer_instance': 0
}
]
:param answer_store: questionnaire answers
:param routing_path: the path followed in the questionnaire
:return: data in a formatted form
"""
data = []
for location in routing_path:
answer_ids = schema.get_answer_ids_for_block(location.block_id)
answers_in_block = answer_store.filter(answer_ids, location.group_instance)
data.extend(answers_in_block)
return data
|
<commit_before>def convert_answers_to_payload_0_0_2(answer_store, schema, routing_path):
"""
Convert answers into the data format below
'data': [
{
'value': 'Joe Bloggs',
'block_id': 'household-composition',
'answer_id': 'household-full-name',
'group_id': 'multiple-questions-group',
'group_instance': 0,
'answer_instance': 0
},
{
'value': 'Fred Flintstone',
'block_id': 'household-composition',
'answer_id': 'household-full-name',
'group_id': 'multiple-questions-group',
'group_instance': 0,
'answer_instance': 1
},
{
'value': 'Husband or wife',
'block_id': 'relationships',
'answer_id': 'who-is-related',
'group_id': 'household-relationships',
'group_instance': 0,
'answer_instance': 0
}
]
:param answer_store: questionnaire answers
:param routing_path: the path followed in the questionnaire
:return: data in a formatted form
"""
data = []
for location in routing_path:
answer_ids = schema.get_answer_ids_for_block(location.block_id)
answers_in_block = answer_store.filter(answer_ids, location.group_instance)
data.extend(answers_in_block)
return data
<commit_msg>Remove group_id & block_id from payload docstring<commit_after>
|
def convert_answers_to_payload_0_0_2(answer_store, schema, routing_path):
"""
Convert answers into the data format below
'data': [
{
'value': 'Joe Bloggs',
'answer_id': 'household-full-name',
'group_instance': 0,
'answer_instance': 0
},
{
'value': 'Fred Flintstone',
'answer_id': 'household-full-name',
'group_instance': 0,
'answer_instance': 1
},
{
'value': 'Husband or wife',
'answer_id': 'who-is-related',
'group_instance': 0,
'answer_instance': 0
}
]
:param answer_store: questionnaire answers
:param routing_path: the path followed in the questionnaire
:return: data in a formatted form
"""
data = []
for location in routing_path:
answer_ids = schema.get_answer_ids_for_block(location.block_id)
answers_in_block = answer_store.filter(answer_ids, location.group_instance)
data.extend(answers_in_block)
return data
|
def convert_answers_to_payload_0_0_2(answer_store, schema, routing_path):
"""
Convert answers into the data format below
'data': [
{
'value': 'Joe Bloggs',
'block_id': 'household-composition',
'answer_id': 'household-full-name',
'group_id': 'multiple-questions-group',
'group_instance': 0,
'answer_instance': 0
},
{
'value': 'Fred Flintstone',
'block_id': 'household-composition',
'answer_id': 'household-full-name',
'group_id': 'multiple-questions-group',
'group_instance': 0,
'answer_instance': 1
},
{
'value': 'Husband or wife',
'block_id': 'relationships',
'answer_id': 'who-is-related',
'group_id': 'household-relationships',
'group_instance': 0,
'answer_instance': 0
}
]
:param answer_store: questionnaire answers
:param routing_path: the path followed in the questionnaire
:return: data in a formatted form
"""
data = []
for location in routing_path:
answer_ids = schema.get_answer_ids_for_block(location.block_id)
answers_in_block = answer_store.filter(answer_ids, location.group_instance)
data.extend(answers_in_block)
return data
Remove group_id & block_id from payload docstring
def convert_answers_to_payload_0_0_2(answer_store, schema, routing_path):
"""
Convert answers into the data format below
'data': [
{
'value': 'Joe Bloggs',
'answer_id': 'household-full-name',
'group_instance': 0,
'answer_instance': 0
},
{
'value': 'Fred Flintstone',
'answer_id': 'household-full-name',
'group_instance': 0,
'answer_instance': 1
},
{
'value': 'Husband or wife',
'answer_id': 'who-is-related',
'group_instance': 0,
'answer_instance': 0
}
]
:param answer_store: questionnaire answers
:param routing_path: the path followed in the questionnaire
:return: data in a formatted form
"""
data = []
for location in routing_path:
answer_ids = schema.get_answer_ids_for_block(location.block_id)
answers_in_block = answer_store.filter(answer_ids, location.group_instance)
data.extend(answers_in_block)
return data
|
<commit_before>def convert_answers_to_payload_0_0_2(answer_store, schema, routing_path):
"""
Convert answers into the data format below
'data': [
{
'value': 'Joe Bloggs',
'block_id': 'household-composition',
'answer_id': 'household-full-name',
'group_id': 'multiple-questions-group',
'group_instance': 0,
'answer_instance': 0
},
{
'value': 'Fred Flintstone',
'block_id': 'household-composition',
'answer_id': 'household-full-name',
'group_id': 'multiple-questions-group',
'group_instance': 0,
'answer_instance': 1
},
{
'value': 'Husband or wife',
'block_id': 'relationships',
'answer_id': 'who-is-related',
'group_id': 'household-relationships',
'group_instance': 0,
'answer_instance': 0
}
]
:param answer_store: questionnaire answers
:param routing_path: the path followed in the questionnaire
:return: data in a formatted form
"""
data = []
for location in routing_path:
answer_ids = schema.get_answer_ids_for_block(location.block_id)
answers_in_block = answer_store.filter(answer_ids, location.group_instance)
data.extend(answers_in_block)
return data
<commit_msg>Remove group_id & block_id from payload docstring<commit_after>def convert_answers_to_payload_0_0_2(answer_store, schema, routing_path):
"""
Convert answers into the data format below
'data': [
{
'value': 'Joe Bloggs',
'answer_id': 'household-full-name',
'group_instance': 0,
'answer_instance': 0
},
{
'value': 'Fred Flintstone',
'answer_id': 'household-full-name',
'group_instance': 0,
'answer_instance': 1
},
{
'value': 'Husband or wife',
'answer_id': 'who-is-related',
'group_instance': 0,
'answer_instance': 0
}
]
:param answer_store: questionnaire answers
:param routing_path: the path followed in the questionnaire
:return: data in a formatted form
"""
data = []
for location in routing_path:
answer_ids = schema.get_answer_ids_for_block(location.block_id)
answers_in_block = answer_store.filter(answer_ids, location.group_instance)
data.extend(answers_in_block)
return data
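# Illustrative sketch with toy stand-ins (not the real survey-runner classes) showing
# how the loop above collects answers block by block along the routing path. The data
# reuses the docstring's example; FakeLocation, FakeSchema and FakeAnswerStore are
# assumptions made only for this sketch.
class FakeLocation:
    def __init__(self, block_id, group_instance=0):
        self.block_id = block_id
        self.group_instance = group_instance

class FakeSchema:
    def get_answer_ids_for_block(self, block_id):
        return {'household-composition': ['household-full-name']}.get(block_id, [])

class FakeAnswerStore:
    def __init__(self, answers):
        self.answers = answers

    def filter(self, answer_ids, group_instance):
        return [a for a in self.answers
                if a['answer_id'] in answer_ids and a['group_instance'] == group_instance]

store = FakeAnswerStore([{'answer_id': 'household-full-name', 'group_instance': 0,
                          'answer_instance': 0, 'value': 'Joe Bloggs'}])
path = [FakeLocation('household-composition')]
# convert_answers_to_payload_0_0_2(store, FakeSchema(), path) would return just the
# single 'Joe Bloggs' answer dict above.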
|
7447de560c064d251ec58ca35814f476005335ae
|
budgetsupervisor/transactions/forms.py
|
budgetsupervisor/transactions/forms.py
|
from django import forms
from django.conf import settings
from django.utils.dateparse import parse_datetime
import os
from .saltedge import SaltEdge
from .models import Transaction, Category
from decimal import Decimal
class ImportTransactionsForm(forms.Form):
def import_transactions(self):
app = SaltEdge(os.environ["APP_ID"], os.environ["SECRET"], "transactions/private.pem")
url = "https://www.saltedge.com/api/v5/transactions?connection_id={}&account_id={}".format(os.environ["CONNECTION_ID"], os.environ["ACCOUNT_ID"])
response = app.get(url)
data = response.json()
for imported_transaction in data['data']:
imported_id = int(imported_transaction['id'])
escaped_category = imported_transaction["category"].replace("_", " ")
category = Category.objects.filter(name__iexact=escaped_category)
category = category[0] if category else Category.objects.get(name="Uncategorized")
t, created = Transaction.objects.update_or_create(
external_id=imported_id,
defaults={
"date": imported_transaction['made_on'],
"amount": imported_transaction['amount'],
"payee": "",
"category": category,
"description": imported_transaction['description'],
}
)
|
from django import forms
from django.conf import settings
from django.utils.dateparse import parse_datetime
import os
from .saltedge import SaltEdge
from .models import Transaction, Category
from decimal import Decimal
class ImportTransactionsForm(forms.Form):
def import_transactions(self):
app = SaltEdge(os.environ["APP_ID"], os.environ["SECRET"], "transactions/private.pem")
url = "https://www.saltedge.com/api/v5/transactions?connection_id={}&account_id={}".format(os.environ["CONNECTION_ID"], os.environ["ACCOUNT_ID"])
response = app.get(url)
data = response.json()
uncategorized = Category.objects.get(name="Uncategorized")
for imported_transaction in data['data']:
imported_id = int(imported_transaction['id'])
escaped_category = imported_transaction["category"].replace("_", " ")
category = Category.objects.filter(name__iexact=escaped_category)
category = category[0] if category else uncategorized
t, created = Transaction.objects.update_or_create(
external_id=imported_id,
defaults={
"date": imported_transaction['made_on'],
"amount": imported_transaction['amount'],
"payee": "",
"category": category,
"description": imported_transaction['description'],
}
)
|
Reduce number of database queries
|
Reduce number of database queries
|
Python
|
mit
|
ltowarek/budget-supervisor
|
from django import forms
from django.conf import settings
from django.utils.dateparse import parse_datetime
import os
from .saltedge import SaltEdge
from .models import Transaction, Category
from decimal import Decimal
class ImportTransactionsForm(forms.Form):
def import_transactions(self):
app = SaltEdge(os.environ["APP_ID"], os.environ["SECRET"], "transactions/private.pem")
url = "https://www.saltedge.com/api/v5/transactions?connection_id={}&account_id={}".format(os.environ["CONNECTION_ID"], os.environ["ACCOUNT_ID"])
response = app.get(url)
data = response.json()
for imported_transaction in data['data']:
imported_id = int(imported_transaction['id'])
escaped_category = imported_transaction["category"].replace("_", " ")
category = Category.objects.filter(name__iexact=escaped_category)
category = category[0] if category else Category.objects.get(name="Uncategorized")
t, created = Transaction.objects.update_or_create(
external_id=imported_id,
defaults={
"date": imported_transaction['made_on'],
"amount": imported_transaction['amount'],
"payee": "",
"category": category,
"description": imported_transaction['description'],
}
)
Reduce number of database queries
|
from django import forms
from django.conf import settings
from django.utils.dateparse import parse_datetime
import os
from .saltedge import SaltEdge
from .models import Transaction, Category
from decimal import Decimal
class ImportTransactionsForm(forms.Form):
def import_transactions(self):
app = SaltEdge(os.environ["APP_ID"], os.environ["SECRET"], "transactions/private.pem")
url = "https://www.saltedge.com/api/v5/transactions?connection_id={}&account_id={}".format(os.environ["CONNECTION_ID"], os.environ["ACCOUNT_ID"])
response = app.get(url)
data = response.json()
uncategorized = Category.objects.get(name="Uncategorized")
for imported_transaction in data['data']:
imported_id = int(imported_transaction['id'])
escaped_category = imported_transaction["category"].replace("_", " ")
category = Category.objects.filter(name__iexact=escaped_category)
category = category[0] if category else uncategorized
t, created = Transaction.objects.update_or_create(
external_id=imported_id,
defaults={
"date": imported_transaction['made_on'],
"amount": imported_transaction['amount'],
"payee": "",
"category": category,
"description": imported_transaction['description'],
}
)
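# A sketch in the same spirit, not part of the commit: the change above hoists the
# constant "Uncategorized" lookup out of the loop. Repeated category names could
# likewise be cached so each distinct name costs at most one query per import run;
# this relies only on the Category model imported above and QuerySet.first().
def make_category_resolver(default):
    cache = {}
    def resolve(name):
        if name not in cache:
            cache[name] = Category.objects.filter(name__iexact=name).first() or default
        return cache[name]
    return resolve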
|
<commit_before>from django import forms
from django.conf import settings
from django.utils.dateparse import parse_datetime
import os
from .saltedge import SaltEdge
from .models import Transaction, Category
from decimal import Decimal
class ImportTransactionsForm(forms.Form):
def import_transactions(self):
app = SaltEdge(os.environ["APP_ID"], os.environ["SECRET"], "transactions/private.pem")
url = "https://www.saltedge.com/api/v5/transactions?connection_id={}&account_id={}".format(os.environ["CONNECTION_ID"], os.environ["ACCOUNT_ID"])
response = app.get(url)
data = response.json()
for imported_transaction in data['data']:
imported_id = int(imported_transaction['id'])
escaped_category = imported_transaction["category"].replace("_", " ")
category = Category.objects.filter(name__iexact=escaped_category)
category = category[0] if category else Category.objects.get(name="Uncategorized")
t, created = Transaction.objects.update_or_create(
external_id=imported_id,
defaults={
"date": imported_transaction['made_on'],
"amount": imported_transaction['amount'],
"payee": "",
"category": category,
"description": imported_transaction['description'],
}
)
<commit_msg>Reduce number of database queries<commit_after>
|
from django import forms
from django.conf import settings
from django.utils.dateparse import parse_datetime
import os
from .saltedge import SaltEdge
from .models import Transaction, Category
from decimal import Decimal
class ImportTransactionsForm(forms.Form):
def import_transactions(self):
app = SaltEdge(os.environ["APP_ID"], os.environ["SECRET"], "transactions/private.pem")
url = "https://www.saltedge.com/api/v5/transactions?connection_id={}&account_id={}".format(os.environ["CONNECTION_ID"], os.environ["ACCOUNT_ID"])
response = app.get(url)
data = response.json()
uncategorized = Category.objects.get(name="Uncategorized")
for imported_transaction in data['data']:
imported_id = int(imported_transaction['id'])
escaped_category = imported_transaction["category"].replace("_", " ")
category = Category.objects.filter(name__iexact=escaped_category)
category = category[0] if category else uncategorized
t, created = Transaction.objects.update_or_create(
external_id=imported_id,
defaults={
"date": imported_transaction['made_on'],
"amount": imported_transaction['amount'],
"payee": "",
"category": category,
"description": imported_transaction['description'],
}
)
|
from django import forms
from django.conf import settings
from django.utils.dateparse import parse_datetime
import os
from .saltedge import SaltEdge
from .models import Transaction, Category
from decimal import Decimal
class ImportTransactionsForm(forms.Form):
def import_transactions(self):
app = SaltEdge(os.environ["APP_ID"], os.environ["SECRET"], "transactions/private.pem")
url = "https://www.saltedge.com/api/v5/transactions?connection_id={}&account_id={}".format(os.environ["CONNECTION_ID"], os.environ["ACCOUNT_ID"])
response = app.get(url)
data = response.json()
for imported_transaction in data['data']:
imported_id = int(imported_transaction['id'])
escaped_category = imported_transaction["category"].replace("_", " ")
category = Category.objects.filter(name__iexact=escaped_category)
category = category[0] if category else Category.objects.get(name="Uncategorized")
t, created = Transaction.objects.update_or_create(
external_id=imported_id,
defaults={
"date": imported_transaction['made_on'],
"amount": imported_transaction['amount'],
"payee": "",
"category": category,
"description": imported_transaction['description'],
}
)
Reduce number of database queriesfrom django import forms
from django.conf import settings
from django.utils.dateparse import parse_datetime
import os
from .saltedge import SaltEdge
from .models import Transaction, Category
from decimal import Decimal
class ImportTransactionsForm(forms.Form):
def import_transactions(self):
app = SaltEdge(os.environ["APP_ID"], os.environ["SECRET"], "transactions/private.pem")
url = "https://www.saltedge.com/api/v5/transactions?connection_id={}&account_id={}".format(os.environ["CONNECTION_ID"], os.environ["ACCOUNT_ID"])
response = app.get(url)
data = response.json()
uncategorized = Category.objects.get(name="Uncategorized")
for imported_transaction in data['data']:
imported_id = int(imported_transaction['id'])
escaped_category = imported_transaction["category"].replace("_", " ")
category = Category.objects.filter(name__iexact=escaped_category)
category = category[0] if category else uncategorized
t, created = Transaction.objects.update_or_create(
external_id=imported_id,
defaults={
"date": imported_transaction['made_on'],
"amount": imported_transaction['amount'],
"payee": "",
"category": category,
"description": imported_transaction['description'],
}
)
|
<commit_before>from django import forms
from django.conf import settings
from django.utils.dateparse import parse_datetime
import os
from .saltedge import SaltEdge
from .models import Transaction, Category
from decimal import Decimal
class ImportTransactionsForm(forms.Form):
def import_transactions(self):
app = SaltEdge(os.environ["APP_ID"], os.environ["SECRET"], "transactions/private.pem")
url = "https://www.saltedge.com/api/v5/transactions?connection_id={}&account_id={}".format(os.environ["CONNECTION_ID"], os.environ["ACCOUNT_ID"])
response = app.get(url)
data = response.json()
for imported_transaction in data['data']:
imported_id = int(imported_transaction['id'])
escaped_category = imported_transaction["category"].replace("_", " ")
category = Category.objects.filter(name__iexact=escaped_category)
category = category[0] if category else Category.objects.get(name="Uncategorized")
t, created = Transaction.objects.update_or_create(
external_id=imported_id,
defaults={
"date": imported_transaction['made_on'],
"amount": imported_transaction['amount'],
"payee": "",
"category": category,
"description": imported_transaction['description'],
}
)
<commit_msg>Reduce number of database queries<commit_after>from django import forms
from django.conf import settings
from django.utils.dateparse import parse_datetime
import os
from .saltedge import SaltEdge
from .models import Transaction, Category
from decimal import Decimal
class ImportTransactionsForm(forms.Form):
def import_transactions(self):
app = SaltEdge(os.environ["APP_ID"], os.environ["SECRET"], "transactions/private.pem")
url = "https://www.saltedge.com/api/v5/transactions?connection_id={}&account_id={}".format(os.environ["CONNECTION_ID"], os.environ["ACCOUNT_ID"])
response = app.get(url)
data = response.json()
uncategorized = Category.objects.get(name="Uncategorized")
for imported_transaction in data['data']:
imported_id = int(imported_transaction['id'])
escaped_category = imported_transaction["category"].replace("_", " ")
category = Category.objects.filter(name__iexact=escaped_category)
category = category[0] if category else uncategorized
t, created = Transaction.objects.update_or_create(
external_id=imported_id,
defaults={
"date": imported_transaction['made_on'],
"amount": imported_transaction['amount'],
"payee": "",
"category": category,
"description": imported_transaction['description'],
}
)
|
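Editorial note, not part of the dataset record above: the change hoists the Category.objects.get(name="Uncategorized") lookup out of the loop, so the fallback category is fetched once instead of being re-queried for each imported row. A minimal sketch of how such a reduction could be checked with Django's query-capture utilities; the import path and the stubbing of the SaltEdge HTTP call are assumptions, not taken from the record.

from django.db import connection
from django.test import TestCase
from django.test.utils import CaptureQueriesContext

from transactions.forms import ImportTransactionsForm # hypothetical app path

class ImportQueryCountTest(TestCase):
    def test_uncategorized_fetched_once(self):
        # Assumes the SaltEdge request inside import_transactions() is mocked
        # out, so only database work is captured here.
        form = ImportTransactionsForm()
        with CaptureQueriesContext(connection) as ctx:
            form.import_transactions()
        lookups = [q for q in ctx.captured_queries if "Uncategorized" in q["sql"]]
        self.assertLessEqual(len(lookups), 1)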
d58576bc658f1433351c0cf9ac0225537e17f472
|
cobe/brain.py
|
cobe/brain.py
|
# Copyright (C) 2012 Peter Teichman
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, TokenNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
|
# Copyright (C) 2012 Peter Teichman
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
|
Remove unused import of TokenNormalizer
|
Remove unused import of TokenNormalizer
Fixes the build
|
Python
|
mit
|
wodim/cobe-ng,LeMagnesium/cobe,pteichman/cobe,tiagochiavericosta/cobe,meska/cobe,pteichman/cobe,LeMagnesium/cobe,wodim/cobe-ng,tiagochiavericosta/cobe,DarkMio/cobe,meska/cobe,DarkMio/cobe
|
# Copyright (C) 2012 Peter Teichman
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, TokenNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
Remove unused import of TokenNormalizer
Fixes the build
|
# Copyright (C) 2012 Peter Teichman
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
|
<commit_before># Copyright (C) 2012 Peter Teichman
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, TokenNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
<commit_msg>Remove unused import of TokenNormalizer
Fixes the build<commit_after>
|
# Copyright (C) 2012 Peter Teichman
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
|
# Copyright (C) 2012 Peter Teichman
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, TokenNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
Remove unused import of TokenNormalizer
Fixes the build# Copyright (C) 2012 Peter Teichman
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
|
<commit_before># Copyright (C) 2012 Peter Teichman
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, TokenNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
<commit_msg>Remove unused import of TokenNormalizer
Fixes the build<commit_after># Copyright (C) 2012 Peter Teichman
import itertools
import logging
from cobe.analysis import (
AccentNormalizer, StemNormalizer, WhitespaceAnalyzer)
from cobe.kvstore import SqliteStore
from cobe.model import Model
from cobe.search import RandomWalkSearcher
log = logging.getLogger(__name__)
class StandardAnalyzer(WhitespaceAnalyzer):
"""A basic analyzer for test purposes.
This combines a whitespace tokenizer with AccentNormalizer.
"""
def __init__(self):
super(StandardAnalyzer, self).__init__()
self.add_token_normalizer(AccentNormalizer())
self.add_token_normalizer(StemNormalizer("english"))
class Brain(object):
"""The all-in-one interface to a cobe stack."""
def __init__(self, filename):
self.analyzer = StandardAnalyzer()
store = SqliteStore(filename)
self.model = Model(self.analyzer, store)
self.searcher = RandomWalkSearcher(self.model)
def reply(self, text):
# Create a search query from the input
query = self.analyzer.query(text, self.model)
result = itertools.islice(self.searcher.search(query), 1).next()
return self.analyzer.join(result)
def train(self, text):
pass
|
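Editorial note, not part of the dataset record above: stale imports like the removed TokenNormalizer can be spotted mechanically before they break a build. A rough, illustrative sketch using only the standard-library ast module; the file path is the one from the record and the "used once" heuristic is deliberately crude.

import ast

path = "cobe/brain.py"  # path assumed from the record above
with open(path) as fh:
    source = fh.read()
tree = ast.parse(source)
# Collect every imported name (Import and ImportFrom nodes alike).
imported = [alias.asname or alias.name
            for node in ast.walk(tree)
            if isinstance(node, (ast.Import, ast.ImportFrom))
            for alias in node.names]
# Heuristic: a name that appears only on its import line is a removal candidate.
unused = [name for name in imported if source.count(name.split(".")[0]) <= 1]
print(unused)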
49bc3e16e260765b76cb1015aa655cc7f57055d2
|
benchmarks.py
|
benchmarks.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Standalone benchmark runner
"""
import cProfile
import pstats
import profile
import numpy as np
print("Running Rust, Python, and C++ benchmarks. 100 points, 50 runs.\n")
# calibrate
pr = profile.Profile()
calibration = np.mean([pr.calibrate(10000) for x in xrange(5)])
# add the bias
profile.Profile.bias = calibration
cProfile.run(open('benches/benchmark_rust.py', 'rb'), 'benches/output_stats_rust')
rust = pstats.Stats('benches/output_stats_rust')
cProfile.run(open('benches/benchmark_python.py', 'rb'), 'benches/output_stats_python')
plain_python = pstats.Stats('benches/output_stats_python')
cProfile.run(open('benches/benchmark_cgg.py', 'rb'), 'benches/output_stats_cgg')
cpp = pstats.Stats('benches/output_stats_cgg')
print("Rust Benchmark\n")
rust.sort_stats('cumulative').print_stats(3)
print("Python Benchmark\n")
plain_python.sort_stats('cumulative').print_stats(3)
print("C++ Benchmark\n")
cpp.sort_stats('cumulative').print_stats(3)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Standalone benchmark runner
"""
import cProfile
import pstats
import profile
import numpy as np
print("Running Rust, Python, and C++ benchmarks. 100 points, 50 runs.\n")
# calibrate
print("Calibrating system")
pr = profile.Profile()
calibration = np.mean([pr.calibrate(10000) for x in xrange(5)])
# add the bias
profile.Profile.bias = calibration
print("Calibration complete, running benchmarks")
bmarks = [
('benches/benchmark_rust.py', 'benches/output_stats_rust', 'Rust + Cython'),
('benches/benchmark_python.py', 'benches/output_stats_python', 'Pure Python'),
('benches/benchmark_cgg.py', 'benches/output_stats_cgg', 'C++')
]
results = []
for benchmark in bmarks:
cProfile.run(open(benchmark[0], 'rb'), benchmark[1])
results.append(pstats.Stats(benchmark[1]))
for i, benchmark in enumerate(bmarks):
print("%s Benchmark\n" % benchmark[2])
results[i].sort_stats('cumulative').print_stats(3)
|
Fix up disgraceful benchmark code
|
Fix up disgraceful benchmark code
|
Python
|
mit
|
urschrei/pypolyline,urschrei/pypolyline,urschrei/pypolyline
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Standalone benchmark runner
"""
import cProfile
import pstats
import profile
import numpy as np
print("Running Rust, Python, and C++ benchmarks. 100 points, 50 runs.\n")
# calibrate
pr = profile.Profile()
calibration = np.mean([pr.calibrate(10000) for x in xrange(5)])
# add the bias
profile.Profile.bias = calibration
cProfile.run(open('benches/benchmark_rust.py', 'rb'), 'benches/output_stats_rust')
rust = pstats.Stats('benches/output_stats_rust')
cProfile.run(open('benches/benchmark_python.py', 'rb'), 'benches/output_stats_python')
plain_python = pstats.Stats('benches/output_stats_python')
cProfile.run(open('benches/benchmark_cgg.py', 'rb'), 'benches/output_stats_cgg')
cpp = pstats.Stats('benches/output_stats_cgg')
print("Rust Benchmark\n")
rust.sort_stats('cumulative').print_stats(3)
print("Python Benchmark\n")
plain_python.sort_stats('cumulative').print_stats(3)
print("C++ Benchmark\n")
cpp.sort_stats('cumulative').print_stats(3)
Fix up disgraceful benchmark code
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Standalone benchmark runner
"""
import cProfile
import pstats
import profile
import numpy as np
print("Running Rust, Python, and C++ benchmarks. 100 points, 50 runs.\n")
# calibrate
print("Calibrating system")
pr = profile.Profile()
calibration = np.mean([pr.calibrate(10000) for x in xrange(5)])
# add the bias
profile.Profile.bias = calibration
print("Calibration complete, running benchmarks")
bmarks = [
('benches/benchmark_rust.py', 'benches/output_stats_rust', 'Rust + Cython'),
('benches/benchmark_python.py', 'benches/output_stats_python', 'Pure Python'),
('benches/benchmark_cgg.py', 'benches/output_stats_cgg', 'C++')
]
results = []
for benchmark in bmarks:
cProfile.run(open(benchmark[0], 'rb'), benchmark[1])
results.append(pstats.Stats(benchmark[1]))
for i, benchmark in enumerate(bmarks):
print("%s Benchmark\n" % benchmark[2])
results[i].sort_stats('cumulative').print_stats(3)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Standalone benchmark runner
"""
import cProfile
import pstats
import profile
import numpy as np
print("Running Rust, Python, and C++ benchmarks. 100 points, 50 runs.\n")
# calibrate
pr = profile.Profile()
calibration = np.mean([pr.calibrate(10000) for x in xrange(5)])
# add the bias
profile.Profile.bias = calibration
cProfile.run(open('benches/benchmark_rust.py', 'rb'), 'benches/output_stats_rust')
rust = pstats.Stats('benches/output_stats_rust')
cProfile.run(open('benches/benchmark_python.py', 'rb'), 'benches/output_stats_python')
plain_python = pstats.Stats('benches/output_stats_python')
cProfile.run(open('benches/benchmark_cgg.py', 'rb'), 'benches/output_stats_cgg')
cpp = pstats.Stats('benches/output_stats_cgg')
print("Rust Benchmark\n")
rust.sort_stats('cumulative').print_stats(3)
print("Python Benchmark\n")
plain_python.sort_stats('cumulative').print_stats(3)
print("C++ Benchmark\n")
cpp.sort_stats('cumulative').print_stats(3)
<commit_msg>Fix up disgraceful benchmark code<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Standalone benchmark runner
"""
import cProfile
import pstats
import profile
import numpy as np
print("Running Rust, Python, and C++ benchmarks. 100 points, 50 runs.\n")
# calibrate
print("Calibrating system")
pr = profile.Profile()
calibration = np.mean([pr.calibrate(10000) for x in xrange(5)])
# add the bias
profile.Profile.bias = calibration
print("Calibration complete, running benchmarks")
bmarks = [
('benches/benchmark_rust.py', 'benches/output_stats_rust', 'Rust + Cython'),
('benches/benchmark_python.py', 'benches/output_stats_python', 'Pure Python'),
('benches/benchmark_cgg.py', 'benches/output_stats_cgg', 'C++')
]
results = []
for benchmark in bmarks:
cProfile.run(open(benchmark[0], 'rb'), benchmark[1])
results.append(pstats.Stats(benchmark[1]))
for i, benchmark in enumerate(bmarks):
print("%s Benchmark\n" % benchmark[2])
results[i].sort_stats('cumulative').print_stats(3)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Standalone benchmark runner
"""
import cProfile
import pstats
import profile
import numpy as np
print("Running Rust, Python, and C++ benchmarks. 100 points, 50 runs.\n")
# calibrate
pr = profile.Profile()
calibration = np.mean([pr.calibrate(10000) for x in xrange(5)])
# add the bias
profile.Profile.bias = calibration
cProfile.run(open('benches/benchmark_rust.py', 'rb'), 'benches/output_stats_rust')
rust = pstats.Stats('benches/output_stats_rust')
cProfile.run(open('benches/benchmark_python.py', 'rb'), 'benches/output_stats_python')
plain_python = pstats.Stats('benches/output_stats_python')
cProfile.run(open('benches/benchmark_cgg.py', 'rb'), 'benches/output_stats_cgg')
cpp = pstats.Stats('benches/output_stats_cgg')
print("Rust Benchmark\n")
rust.sort_stats('cumulative').print_stats(3)
print("Python Benchmark\n")
plain_python.sort_stats('cumulative').print_stats(3)
print("C++ Benchmark\n")
cpp.sort_stats('cumulative').print_stats(3)
Fix up disgraceful benchmark code#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Standalone benchmark runner
"""
import cProfile
import pstats
import profile
import numpy as np
print("Running Rust, Python, and C++ benchmarks. 100 points, 50 runs.\n")
# calibrate
print("Calibrating system")
pr = profile.Profile()
calibration = np.mean([pr.calibrate(10000) for x in xrange(5)])
# add the bias
profile.Profile.bias = calibration
print("Calibration complete, running benchmarks")
bmarks = [
('benches/benchmark_rust.py', 'benches/output_stats_rust', 'Rust + Cython'),
('benches/benchmark_python.py', 'benches/output_stats_python', 'Pure Python'),
('benches/benchmark_cgg.py', 'benches/output_stats_cgg', 'C++')
]
results = []
for benchmark in bmarks:
cProfile.run(open(benchmark[0], 'rb'), benchmark[1])
results.append(pstats.Stats(benchmark[1]))
for i, benchmark in enumerate(bmarks):
print("%s Benchmark\n" % benchmark[2])
results[i].sort_stats('cumulative').print_stats(3)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Standalone benchmark runner
"""
import cProfile
import pstats
import profile
import numpy as np
print("Running Rust, Python, and C++ benchmarks. 100 points, 50 runs.\n")
# calibrate
pr = profile.Profile()
calibration = np.mean([pr.calibrate(10000) for x in xrange(5)])
# add the bias
profile.Profile.bias = calibration
cProfile.run(open('benches/benchmark_rust.py', 'rb'), 'benches/output_stats_rust')
rust = pstats.Stats('benches/output_stats_rust')
cProfile.run(open('benches/benchmark_python.py', 'rb'), 'benches/output_stats_python')
plain_python = pstats.Stats('benches/output_stats_python')
cProfile.run(open('benches/benchmark_cgg.py', 'rb'), 'benches/output_stats_cgg')
cpp = pstats.Stats('benches/output_stats_cgg')
print("Rust Benchmark\n")
rust.sort_stats('cumulative').print_stats(3)
print("Python Benchmark\n")
plain_python.sort_stats('cumulative').print_stats(3)
print("C++ Benchmark\n")
cpp.sort_stats('cumulative').print_stats(3)
<commit_msg>Fix up disgraceful benchmark code<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Standalone benchmark runner
"""
import cProfile
import pstats
import profile
import numpy as np
print("Running Rust, Python, and C++ benchmarks. 100 points, 50 runs.\n")
# calibrate
print("Calibrating system")
pr = profile.Profile()
calibration = np.mean([pr.calibrate(10000) for x in xrange(5)])
# add the bias
profile.Profile.bias = calibration
print("Calibration complete, running benchmarks")
bmarks = [
('benches/benchmark_rust.py', 'benches/output_stats_rust', 'Rust + Cython'),
('benches/benchmark_python.py', 'benches/output_stats_python', 'Pure Python'),
('benches/benchmark_cgg.py', 'benches/output_stats_cgg', 'C++')
]
results = []
for benchmark in bmarks:
cProfile.run(open(benchmark[0], 'rb'), benchmark[1])
results.append(pstats.Stats(benchmark[1]))
for i, benchmark in enumerate(bmarks):
print("%s Benchmark\n" % benchmark[2])
results[i].sort_stats('cumulative').print_stats(3)
|
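Editorial note, not part of the dataset record above: the rewritten benchmark keeps a single table of (script, stats file, label) tuples and loops over it instead of copy-pasting the cProfile/pstats calls. Purely as an illustration of the same table-driven layout, a wall-clock-only variant using subprocess and timeit; the script paths are taken from the record and assumed to exist, and this is not what the commit itself does.

import subprocess
import timeit

bmarks = [
    ('benches/benchmark_rust.py', 'Rust + Cython'),
    ('benches/benchmark_python.py', 'Pure Python'),
    ('benches/benchmark_cgg.py', 'C++'),
]
for path, label in bmarks:
    # Run each benchmark script once in a child interpreter and time it.
    elapsed = timeit.timeit(
        lambda: subprocess.check_call(['python', path]), number=1)
    print("%s: %.3fs" % (label, elapsed))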
d4b962c599a751db46e4dec2ead9828a3529c453
|
getTwitter.py
|
getTwitter.py
|
import urllib2
from BeautifulSoup import *
print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data'
userResponse = raw_input("Please enter the full URL from the Tweet page")
response = urllib2.urlopen(userResponse)
html = response.read()
soup = BeautifulSoup(html)
with open('TwitterPage', 'a') as f:
f.write(html)
|
import time
import urllib2
from BeautifulSoup import *
# from bs4 import *
print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data.'
print "Current date & time {}".format(time.strftime("%c"))
userResponse = raw_input("Please enter the full URL from the Tweet page: ")
response = urllib2.urlopen(userResponse)
html = response.read()
soup = BeautifulSoup(html)
with open(time.strftime("%c"), 'a') as f:
f.write(html)
|
Call html page the date and time of get request
|
Call html page the date and time of get request
The html file that is outputted now has the current date and time as its name
|
Python
|
artistic-2.0
|
christaylortf/FinalYearProject
|
import urllib2
from BeautifulSoup import *
print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data'
userResponse = raw_input("Please enter the full URL from the Tweet page")
response = urllib2.urlopen(userResponse)
html = response.read()
soup = BeautifulSoup(html)
with open('TwitterPage', 'a') as f:
f.write(html)
Call html page the date and time of get request
The html file that is outputted now has the current date and time as its name
|
import time
import urllib2
from BeautifulSoup import *
# from bs4 import *
print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data.'
print "Current date & time {}".format(time.strftime("%c"))
userResponse = raw_input("Please enter the full URL from the Tweet page: ")
response = urllib2.urlopen(userResponse)
html = response.read()
soup = BeautifulSoup(html)
with open(time.strftime("%c"), 'a') as f:
f.write(html)
|
<commit_before>import urllib2
from BeautifulSoup import *
print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data'
userResponse = raw_input("Please enter the full URL from the Tweet page")
response = urllib2.urlopen(userResponse)
html = response.read()
soup = BeautifulSoup(html)
with open('TwitterPage', 'a') as f:
f.write(html)
<commit_msg>Call html page the date and time of get request
The html file that is outputted now has the current date and time as its name<commit_after>
|
import time
import urllib2
from BeautifulSoup import *
# from bs4 import *
print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data.'
print "Current date & time {}".format(time.strftime("%c"))
userResponse = raw_input("Please enter the full URL from the Tweet page: ")
response = urllib2.urlopen(userResponse)
html = response.read()
soup = BeautifulSoup(html)
with open(time.strftime("%c"), 'a') as f:
f.write(html)
|
import urllib2
from BeautifulSoup import *
print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data'
userResponse = raw_input("Please enter the full URL from the Tweet page")
response = urllib2.urlopen(userResponse)
html = response.read()
soup = BeautifulSoup(html)
with open('TwitterPage', 'a') as f:
f.write(html)
Call html page the date and time of get request
The html file that is outputted now has the current date and time as its nameimport time
import urllib2
from BeautifulSoup import *
# from bs4 import *
print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data.'
print "Current date & time {}".format(time.strftime("%c"))
userResponse = raw_input("Please enter the full URL from the Tweet page: ")
response = urllib2.urlopen(userResponse)
html = response.read()
soup = BeautifulSoup(html)
with open(time.strftime("%c"), 'a') as f:
f.write(html)
|
<commit_before>import urllib2
from BeautifulSoup import *
print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data'
userResponse = raw_input("Please enter the full URL from the Tweet page")
response = urllib2.urlopen(userResponse)
html = response.read()
soup = BeautifulSoup(html)
with open('TwitterPage', 'a') as f:
f.write(html)
<commit_msg>Call html page the date and time of get request
The html file that is outputted now has the current date and time as its name<commit_after>import time
import urllib2
from BeautifulSoup import *
# from bs4 import *
print 'Welcome to the Get Twitter tool. This tool will allow you to download a page from Twitter to be used to extract the data.'
print "Current date & time {}".format(time.strftime("%c"))
userResponse = raw_input("Please enter the full URL from the Tweet page: ")
response = urllib2.urlopen(userResponse)
html = response.read()
soup = BeautifulSoup(html)
with open(time.strftime("%c"), 'a') as f:
f.write(html)
|
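Editorial note, not part of the dataset record above: time.strftime("%c") produces locale-dependent output with spaces and colons (for example "Mon Jan  1 12:00:00 2024"), which makes awkward or, on some filesystems, invalid filenames. A small illustrative sketch of a filesystem-friendly timestamp format; the ".html" suffix and placeholder payload are assumptions, not from the record.

import time

# Sortable, no spaces or colons, safe as a filename on common filesystems.
filename = time.strftime("%Y-%m-%d_%H-%M-%S") + ".html"
with open(filename, 'a') as f:
    f.write("<html>...</html>")  # placeholder payload for illustration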
dcddc500ec8ae45c1a33a43e1727cc38c7b7e001
|
blox/utils.py
|
blox/utils.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import ast
import sys
import struct
try:
import ujson as json
except ImportError:
import json
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
else:
string_types = basestring,
def flatten_dtype(dtype):
dtype = str(dtype)
if dtype.startswith(('{', '[')):
return ast.literal_eval(dtype)
return dtype
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json.dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import ast
import sys
import struct
import functools
try:
import ujson as json
json_dumps = json.dumps
except ImportError:
import json
json_dumps = functools.partial(json.dumps, separators=',:')
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
else:
string_types = basestring,
def flatten_dtype(dtype):
dtype = str(dtype)
if dtype.startswith(('{', '[')):
return ast.literal_eval(dtype)
return dtype
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json_dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
|
Write compact json when using built-in json.dumps
|
Write compact json when using built-in json.dumps
|
Python
|
mit
|
aldanor/blox
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import ast
import sys
import struct
try:
import ujson as json
except ImportError:
import json
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
else:
string_types = basestring,
def flatten_dtype(dtype):
dtype = str(dtype)
if dtype.startswith(('{', '[')):
return ast.literal_eval(dtype)
return dtype
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json.dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
Write compact json when using built-in json.dumps
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import ast
import sys
import struct
import functools
try:
import ujson as json
json_dumps = json.dumps
except ImportError:
import json
json_dumps = functools.partial(json.dumps, separators=',:')
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
else:
string_types = basestring,
def flatten_dtype(dtype):
dtype = str(dtype)
if dtype.startswith(('{', '[')):
return ast.literal_eval(dtype)
return dtype
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json_dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import ast
import sys
import struct
try:
import ujson as json
except ImportError:
import json
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
else:
string_types = basestring,
def flatten_dtype(dtype):
dtype = str(dtype)
if dtype.startswith(('{', '[')):
return ast.literal_eval(dtype)
return dtype
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json.dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
<commit_msg>Write compact json when using built-in json.dumps<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import ast
import sys
import struct
import functools
try:
import ujson as json
json_dumps = json.dumps
except ImportError:
import json
json_dumps = functools.partial(json.dumps, separators=',:')
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
else:
string_types = basestring,
def flatten_dtype(dtype):
dtype = str(dtype)
if dtype.startswith(('{', '[')):
return ast.literal_eval(dtype)
return dtype
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json_dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import ast
import sys
import struct
try:
import ujson as json
except ImportError:
import json
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
else:
string_types = basestring,
def flatten_dtype(dtype):
dtype = str(dtype)
if dtype.startswith(('{', '[')):
return ast.literal_eval(dtype)
return dtype
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json.dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
Write compact json when using built-in json.dumps# -*- coding: utf-8 -*-
from __future__ import absolute_import
import ast
import sys
import struct
import functools
try:
import ujson as json
json_dumps = json.dumps
except ImportError:
import json
json_dumps = functools.partial(json.dumps, separators=',:')
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
else:
string_types = basestring,
def flatten_dtype(dtype):
dtype = str(dtype)
if dtype.startswith(('{', '[')):
return ast.literal_eval(dtype)
return dtype
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json_dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import
import ast
import sys
import struct
try:
import ujson as json
except ImportError:
import json
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
else:
string_types = basestring,
def flatten_dtype(dtype):
dtype = str(dtype)
if dtype.startswith(('{', '[')):
return ast.literal_eval(dtype)
return dtype
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json.dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
<commit_msg>Write compact json when using built-in json.dumps<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import
import ast
import sys
import struct
import functools
try:
import ujson as json
json_dumps = json.dumps
except ImportError:
import json
json_dumps = functools.partial(json.dumps, separators=',:')
PY3 = sys.version_info[0] == 3
if PY3:
string_types = str,
else:
string_types = basestring,
def flatten_dtype(dtype):
dtype = str(dtype)
if dtype.startswith(('{', '[')):
return ast.literal_eval(dtype)
return dtype
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json_dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
|
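Editorial note, not part of the dataset record above: the standard library documents separators as an (item_separator, key_separator) pair; the two-character string ',:' used in the commit happens to unpack into the same two characters, but the tuple form is the documented spelling. A short sketch of the size difference on made-up sample data:

import json

data = {"values": [1, 2, 3], "name": "example"}      # arbitrary sample
default = json.dumps(data)                            # ', ' and ': ' separators
compact = json.dumps(data, separators=(',', ':'))     # documented compact form
print(len(default), len(compact))  # the compact form saves a byte per separator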
bc012979f86b9ccd0842ef721b86a7e72811942c
|
brink/urls.py
|
brink/urls.py
|
def GET(route, handler):
return ("GET", route, handler)
def POST(route, handler):
return ("POST", route, handler)
def PUT(route, handler):
return ("PUT", route, handler)
def PATCH(route, handler):
return ("PATCH", route, handler)
def DELETE(route, handler):
return ("DELETE", route, handler)
def WS(route, handler):
return ("WS", route, handler)
|
def get(route, handler):
return ("GET", route, handler)
def post(route, handler):
return ("POST", route, handler)
def put(route, handler):
return ("PUT", route, handler)
def patch(route, handler):
return ("PATCH", route, handler)
def delete(route, handler):
return ("DELETE", route, handler)
def ws(route, handler):
return ("WS", route, handler)
# TODO: deprecate and remove
GET = get
POST = post
PUT = put
PATCH = patch
DELETE = delete
WS = ws
|
Change HTTP verb function names to lower case
|
Change HTTP verb function names to lower case
|
Python
|
bsd-3-clause
|
brinkframework/brink
|
def GET(route, handler):
return ("GET", route, handler)
def POST(route, handler):
return ("POST", route, handler)
def PUT(route, handler):
return ("PUT", route, handler)
def PATCH(route, handler):
return ("PATCH", route, handler)
def DELETE(route, handler):
return ("DELETE", route, handler)
def WS(route, handler):
return ("WS", route, handler)
Change HTTP verb function names to lower case
|
def get(route, handler):
return ("GET", route, handler)
def post(route, handler):
return ("POST", route, handler)
def put(route, handler):
return ("PUT", route, handler)
def patch(route, handler):
return ("PATCH", route, handler)
def delete(route, handler):
return ("DELETE", route, handler)
def ws(route, handler):
return ("WS", route, handler)
# TODO: deprecate and remove
GET = get
POST = post
PUT = put
PATCH = patch
DELETE = delete
WS = ws
|
<commit_before>def GET(route, handler):
return ("GET", route, handler)
def POST(route, handler):
return ("POST", route, handler)
def PUT(route, handler):
return ("PUT", route, handler)
def PATCH(route, handler):
return ("PATCH", route, handler)
def DELETE(route, handler):
return ("DELETE", route, handler)
def WS(route, handler):
return ("WS", route, handler)
<commit_msg>Change HTTP verb function names to lower case<commit_after>
|
def get(route, handler):
return ("GET", route, handler)
def post(route, handler):
return ("POST", route, handler)
def put(route, handler):
return ("PUT", route, handler)
def patch(route, handler):
return ("PATCH", route, handler)
def delete(route, handler):
return ("DELETE", route, handler)
def ws(route, handler):
return ("WS", route, handler)
# TODO: deprecate and remove
GET = get
POST = post
PUT = put
PATCH = patch
DELETE = delete
WS = ws
|
def GET(route, handler):
return ("GET", route, handler)
def POST(route, handler):
return ("POST", route, handler)
def PUT(route, handler):
return ("PUT", route, handler)
def PATCH(route, handler):
return ("PATCH", route, handler)
def DELETE(route, handler):
return ("DELETE", route, handler)
def WS(route, handler):
return ("WS", route, handler)
Change HTTP verb function names to lower casedef get(route, handler):
return ("GET", route, handler)
def post(route, handler):
return ("POST", route, handler)
def put(route, handler):
return ("PUT", route, handler)
def patch(route, handler):
return ("PATCH", route, handler)
def delete(route, handler):
return ("DELETE", route, handler)
def ws(route, handler):
return ("WS", route, handler)
# TODO: deprecate and remove
GET = get
POST = post
PUT = put
PATCH = patch
DELETE = delete
WS = ws
|
<commit_before>def GET(route, handler):
return ("GET", route, handler)
def POST(route, handler):
return ("POST", route, handler)
def PUT(route, handler):
return ("PUT", route, handler)
def PATCH(route, handler):
return ("PATCH", route, handler)
def DELETE(route, handler):
return ("DELETE", route, handler)
def WS(route, handler):
return ("WS", route, handler)
<commit_msg>Change HTTP verb function names to lower case<commit_after>def get(route, handler):
return ("GET", route, handler)
def post(route, handler):
return ("POST", route, handler)
def put(route, handler):
return ("PUT", route, handler)
def patch(route, handler):
return ("PATCH", route, handler)
def delete(route, handler):
return ("DELETE", route, handler)
def ws(route, handler):
return ("WS", route, handler)
# TODO: deprecate and remove
GET = get
POST = post
PUT = put
PATCH = patch
DELETE = delete
WS = ws
|
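Editorial note, not part of the dataset record above: the new code keeps the uppercase names as plain aliases with a TODO to deprecate them. One common way to make such aliases warn before removal, shown only as an illustration (the commit itself does not do this):

import functools
import warnings

def _deprecated_alias(func, old_name):
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        warnings.warn(
            "%s() is deprecated, use %s() instead" % (old_name, func.__name__),
            DeprecationWarning, stacklevel=2)
        return func(*args, **kwargs)
    return wrapper

def get(route, handler):
    return ("GET", route, handler)

GET = _deprecated_alias(get, "GET")  # warns on use, still returns the tuple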
f41b06ca9a61b75bdb6cef0a0c534755ca80a513
|
tests/unit/test_pathologic_models.py
|
tests/unit/test_pathologic_models.py
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this using soft failures.
"""
def grammar(): return ZeroOrMore(Optional('a'))
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch, EOF
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this case.
"""
def grammar(): return ZeroOrMore(Optional('a')), EOF
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
|
Fix in test for pathologic grammars.
|
Fix in test for pathologic grammars.
|
Python
|
mit
|
leiyangyou/Arpeggio,leiyangyou/Arpeggio
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this using soft failures.
"""
def grammar(): return ZeroOrMore(Optional('a'))
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
Fix in test for pathologic grammars.
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch, EOF
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this case.
"""
def grammar(): return ZeroOrMore(Optional('a')), EOF
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
|
<commit_before># -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this using soft failures.
"""
def grammar(): return ZeroOrMore(Optional('a'))
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
<commit_msg>Fix in test for pathologic grammars.<commit_after>
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch, EOF
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this case.
"""
def grammar(): return ZeroOrMore(Optional('a')), EOF
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
|
# -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this using soft failures.
"""
def grammar(): return ZeroOrMore(Optional('a'))
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
Fix in test for pathologic grammars.# -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch, EOF
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this case.
"""
def grammar(): return ZeroOrMore(Optional('a')), EOF
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
|
<commit_before># -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this using soft failures.
"""
def grammar(): return ZeroOrMore(Optional('a'))
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
<commit_msg>Fix in test for pathologic grammars.<commit_after># -*- coding: utf-8 -*-
#######################################################################
# Name: test_pathologic_models
# Purpose: Test for grammar models that could lead to infinite loops are
# handled properly.
# Author: Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# Copyright: (c) 2014 Igor R. Dejanović <igor DOT dejanovic AT gmail DOT com>
# License: MIT License
#######################################################################
from __future__ import unicode_literals
import pytest
from arpeggio import ZeroOrMore, Optional, ParserPython, NoMatch, EOF
def test_optional_inside_zeroormore():
"""
Test optional match inside a zero or more.
Optional should always succeed thus inducing ZeroOrMore
to try the match again.
Arpeggio handle this case.
"""
def grammar(): return ZeroOrMore(Optional('a')), EOF
parser = ParserPython(grammar)
with pytest.raises(NoMatch):
# This could lead to infinite loop
parser.parse('b')
|
1f65142b754478570a3733f4c0abbf3ef24d9c7e
|
photutils/utils/_optional_deps.py
|
photutils/utils/_optional_deps.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Checks for optional dependencies using lazy import from
`PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_.
"""
import importlib
# This list is a duplicate of the dependencies in setup.cfg "all".
optional_deps = ['scipy', 'matplotlib', 'scikit-image', 'scikit-learn',
'gwcs']
deps = {key.upper(): key for key in optional_deps}
__all__ = [f'HAS_{pkg}' for pkg in deps]
def __getattr__(name):
if name in __all__:
try:
importlib.import_module(deps[name[4:]])
except (ImportError, ModuleNotFoundError):
return False
return True
raise AttributeError(f'Module {__name__!r} has no attribute {name!r}.')
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Checks for optional dependencies using lazy import from
`PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_.
"""
import importlib
# This list is a duplicate of the dependencies in setup.cfg "all".
# Note that in some cases the package names are different from the
# pip-install name (e.g.k scikit-image -> skimage).
optional_deps = ['scipy', 'matplotlib', 'skimage', 'sklearn', 'gwcs']
deps = {key.upper(): key for key in optional_deps}
__all__ = [f'HAS_{pkg}' for pkg in deps]
def __getattr__(name):
if name in __all__:
try:
importlib.import_module(deps[name[4:]])
except (ImportError, ModuleNotFoundError):
return False
return True
raise AttributeError(f'Module {__name__!r} has no attribute {name!r}.')
|
Fix for package name differences
|
Fix for package name differences
|
Python
|
bsd-3-clause
|
larrybradley/photutils,astropy/photutils
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Checks for optional dependencies using lazy import from
`PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_.
"""
import importlib
# This list is a duplicate of the dependencies in setup.cfg "all".
optional_deps = ['scipy', 'matplotlib', 'scikit-image', 'scikit-learn',
'gwcs']
deps = {key.upper(): key for key in optional_deps}
__all__ = [f'HAS_{pkg}' for pkg in deps]
def __getattr__(name):
if name in __all__:
try:
importlib.import_module(deps[name[4:]])
except (ImportError, ModuleNotFoundError):
return False
return True
raise AttributeError(f'Module {__name__!r} has no attribute {name!r}.')
Fix for package name differences
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Checks for optional dependencies using lazy import from
`PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_.
"""
import importlib
# This list is a duplicate of the dependencies in setup.cfg "all".
# Note that in some cases the package names are different from the
# pip-install name (e.g., scikit-image -> skimage).
optional_deps = ['scipy', 'matplotlib', 'skimage', 'sklearn', 'gwcs']
deps = {key.upper(): key for key in optional_deps}
__all__ = [f'HAS_{pkg}' for pkg in deps]
def __getattr__(name):
if name in __all__:
try:
importlib.import_module(deps[name[4:]])
except (ImportError, ModuleNotFoundError):
return False
return True
raise AttributeError(f'Module {__name__!r} has no attribute {name!r}.')
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Checks for optional dependencies using lazy import from
`PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_.
"""
import importlib
# This list is a duplicate of the dependencies in setup.cfg "all".
optional_deps = ['scipy', 'matplotlib', 'scikit-image', 'scikit-learn',
'gwcs']
deps = {key.upper(): key for key in optional_deps}
__all__ = [f'HAS_{pkg}' for pkg in deps]
def __getattr__(name):
if name in __all__:
try:
importlib.import_module(deps[name[4:]])
except (ImportError, ModuleNotFoundError):
return False
return True
raise AttributeError(f'Module {__name__!r} has no attribute {name!r}.')
<commit_msg>Fix for package name differences<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Checks for optional dependencies using lazy import from
`PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_.
"""
import importlib
# This list is a duplicate of the dependencies in setup.cfg "all".
# Note that in some cases the package names are different from the
# pip-install name (e.g., scikit-image -> skimage).
optional_deps = ['scipy', 'matplotlib', 'skimage', 'sklearn', 'gwcs']
deps = {key.upper(): key for key in optional_deps}
__all__ = [f'HAS_{pkg}' for pkg in deps]
def __getattr__(name):
if name in __all__:
try:
importlib.import_module(deps[name[4:]])
except (ImportError, ModuleNotFoundError):
return False
return True
raise AttributeError(f'Module {__name__!r} has no attribute {name!r}.')
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Checks for optional dependencies using lazy import from
`PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_.
"""
import importlib
# This list is a duplicate of the dependencies in setup.cfg "all".
optional_deps = ['scipy', 'matplotlib', 'scikit-image', 'scikit-learn',
'gwcs']
deps = {key.upper(): key for key in optional_deps}
__all__ = [f'HAS_{pkg}' for pkg in deps]
def __getattr__(name):
if name in __all__:
try:
importlib.import_module(deps[name[4:]])
except (ImportError, ModuleNotFoundError):
return False
return True
raise AttributeError(f'Module {__name__!r} has no attribute {name!r}.')
Fix for package name differences# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Checks for optional dependencies using lazy import from
`PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_.
"""
import importlib
# This list is a duplicate of the dependencies in setup.cfg "all".
# Note that in some cases the package names are different from the
# pip-install name (e.g., scikit-image -> skimage).
optional_deps = ['scipy', 'matplotlib', 'skimage', 'sklearn', 'gwcs']
deps = {key.upper(): key for key in optional_deps}
__all__ = [f'HAS_{pkg}' for pkg in deps]
def __getattr__(name):
if name in __all__:
try:
importlib.import_module(deps[name[4:]])
except (ImportError, ModuleNotFoundError):
return False
return True
raise AttributeError(f'Module {__name__!r} has no attribute {name!r}.')
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Checks for optional dependencies using lazy import from
`PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_.
"""
import importlib
# This list is a duplicate of the dependencies in setup.cfg "all".
optional_deps = ['scipy', 'matplotlib', 'scikit-image', 'scikit-learn',
'gwcs']
deps = {key.upper(): key for key in optional_deps}
__all__ = [f'HAS_{pkg}' for pkg in deps]
def __getattr__(name):
if name in __all__:
try:
importlib.import_module(deps[name[4:]])
except (ImportError, ModuleNotFoundError):
return False
return True
raise AttributeError(f'Module {__name__!r} has no attribute {name!r}.')
<commit_msg>Fix for package name differences<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Checks for optional dependencies using lazy import from
`PEP 562 <https://www.python.org/dev/peps/pep-0562/>`_.
"""
import importlib
# This list is a duplicate of the dependencies in setup.cfg "all".
# Note that in some cases the package names are different from the
# pip-install name (e.g., scikit-image -> skimage).
optional_deps = ['scipy', 'matplotlib', 'skimage', 'sklearn', 'gwcs']
deps = {key.upper(): key for key in optional_deps}
__all__ = [f'HAS_{pkg}' for pkg in deps]
def __getattr__(name):
if name in __all__:
try:
importlib.import_module(deps[name[4:]])
except (ImportError, ModuleNotFoundError):
return False
return True
raise AttributeError(f'Module {__name__!r} has no attribute {name!r}.')
|
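Aside: a short usage sketch for the lazy-import pattern above. Callers import the HAS_<PKG> flags and branch on them; the optional dependency is only imported when the attribute is first looked up. The helper function below is hypothetical, but the module path and flag names follow the record:

# Hypothetical caller somewhere inside photutils.
from photutils.utils._optional_deps import HAS_SCIPY, HAS_MATPLOTLIB

def smooth(data, sigma=1.0):
    if not HAS_SCIPY:
        raise ImportError('scipy is required for smooth()')
    from scipy.ndimage import gaussian_filter  # safe: HAS_SCIPY already verified
    return gaussian_filter(data, sigma=sigma)

if HAS_MATPLOTLIB:
    import matplotlib.pyplot as plt  # optional plotting support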
120520e44d7dfcf3079bfdc9a118d28b5620cb14
|
polymorphic/formsets/utils.py
|
polymorphic/formsets/utils.py
|
"""
Internal utils
"""
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
"""
dest.add_css(media._css)
dest.add_js(media._js)
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
dest += media
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
Fix `add_media` util for Django 2.0
|
Fix `add_media` util for Django 2.0
Neither `add_css` nor `add_js` exist in Django 2.0 because the method
for adding `Media` classes together has changed.
Ref: https://github.com/django/django/commit/c19b56f633e172b3c02094cbe12d28865ee57772
|
Python
|
bsd-3-clause
|
chrisglass/django_polymorphic,chrisglass/django_polymorphic
|
"""
Internal utils
"""
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
"""
dest.add_css(media._css)
dest.add_js(media._js)
Fix `add_media` util for Django 2.0
Neither `add_css` nor `add_js` exist in Django 2.0 because the method
for adding `Media` classes together has changed.
Ref: https://github.com/django/django/commit/c19b56f633e172b3c02094cbe12d28865ee57772
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
dest += media
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
<commit_before>"""
Internal utils
"""
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
"""
dest.add_css(media._css)
dest.add_js(media._js)
<commit_msg>Fix `add_media` util for Django 2.0
Neither `add_css` nor `add_js` exist in Django 2.0 because the method
for adding `Media` classes together has changed.
Ref: https://github.com/django/django/commit/c19b56f633e172b3c02094cbe12d28865ee57772<commit_after>
|
"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
dest += media
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
"""
Internal utils
"""
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
"""
dest.add_css(media._css)
dest.add_js(media._js)
Fix `add_media` util for Django 2.0
Neither `add_css` nor `add_js` exist in Django 2.0 because the method
for adding `Media` classes together has changed.
Ref: https://github.com/django/django/commit/c19b56f633e172b3c02094cbe12d28865ee57772"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
dest += media
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
<commit_before>"""
Internal utils
"""
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
"""
dest.add_css(media._css)
dest.add_js(media._js)
<commit_msg>Fix `add_media` util for Django 2.0
Neither `add_css` nor `add_js` exist in Django 2.0 because the method
for adding `Media` classes together has changed.
Ref: https://github.com/django/django/commit/c19b56f633e172b3c02094cbe12d28865ee57772<commit_after>"""
Internal utils
"""
import django
def add_media(dest, media):
"""
Optimized version of django.forms.Media.__add__() that doesn't create new objects.
Only required for Django < 2.0
"""
if django.VERSION >= (2, 0):
dest += media
else:
dest.add_css(media._css)
dest.add_js(media._js)
|
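Aside: the behavioural difference behind the version check above. From Django 2.0 the add_css()/add_js() methods are gone and forms.Media objects are combined with '+', which returns a new merged instance instead of mutating the left-hand side; that is why the helper falls back to 'dest += media' on newer versions. A standalone illustration, assuming Django is installed:

from django import forms

base = forms.Media(css={'all': ['base.css']}, js=['base.js'])
extra = forms.Media(js=['widget.js'])

combined = base + extra   # new Media object holding both CSS and JS lists
base += extra             # '+=' rebinds the name to the merged object, so callers
                          # holding a reference to the old object will not see it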
0dbab7d21f6faf2091590c138e64d5f00f094eb5
|
name/urls.py
|
name/urls.py
|
from django.conf.urls import url
from django.contrib import admin
from . import views, feeds
from .api import views as api
admin.autodiscover()
app_name = 'name'
urlpatterns = [
url(r'^$', views.landing, name='landing'),
url(r'about/$', views.about, name='about'),
url(r'export/$', views.export, name='export'),
url(r'feed/$', feeds.NameAtomFeed(), name='feed'),
url(r'label/(?P<name_value>.*)$', views.label, name='label'),
url(r'locations.json/$', api.locations_json, name='locations-json'),
url(r'map/$', views.locations, name='map'),
url(r'opensearch.xml$', views.opensearch, name='opensearch'),
url(r'search/$', views.SearchView.as_view(), name='search'),
url(r'search.json$', api.search_json, name="search-json"),
url(r'stats.json/$', api.stats_json, name='stats-json'),
url(r'stats/$', views.stats, name='stats'),
url(r'(?P<name_id>.*).json$', api.name_json, name='detail-json'),
url(r'(?P<name_id>.*).mads.xml$', views.mads_serialize, name='mads-serialize'),
url(r'(?P<name_id>[^/]+)/', views.detail, name='detail')
]
|
from django.conf.urls import url
from django.contrib import admin
from . import views, feeds
from .api import views as api
admin.autodiscover()
app_name = 'name'
urlpatterns = [
url(r'^$', views.landing, name='landing'),
url(r'about/$', views.about, name='about'),
url(r'export/$', views.export, name='export'),
url(r'feed/$', feeds.NameAtomFeed(), name='feed'),
url(r'label/(?P<name_value>.*)$', views.label, name='label'),
url(r'locations.json$', api.locations_json, name='locations-json'),
url(r'map/$', views.locations, name='map'),
url(r'opensearch.xml$', views.opensearch, name='opensearch'),
url(r'search/$', views.SearchView.as_view(), name='search'),
url(r'search.json$', api.search_json, name="search-json"),
url(r'stats.json$', api.stats_json, name='stats-json'),
url(r'stats/$', views.stats, name='stats'),
url(r'(?P<name_id>.*).json$', api.name_json, name='detail-json'),
url(r'(?P<name_id>.*).mads.xml$', views.mads_serialize, name='mads-serialize'),
url(r'(?P<name_id>[^/]+)/', views.detail, name='detail')
]
|
Remove trailing slashes from JSON endpoints.
|
Remove trailing slashes from JSON endpoints.
|
Python
|
bsd-3-clause
|
unt-libraries/django-name,unt-libraries/django-name,unt-libraries/django-name
|
from django.conf.urls import url
from django.contrib import admin
from . import views, feeds
from .api import views as api
admin.autodiscover()
app_name = 'name'
urlpatterns = [
url(r'^$', views.landing, name='landing'),
url(r'about/$', views.about, name='about'),
url(r'export/$', views.export, name='export'),
url(r'feed/$', feeds.NameAtomFeed(), name='feed'),
url(r'label/(?P<name_value>.*)$', views.label, name='label'),
url(r'locations.json/$', api.locations_json, name='locations-json'),
url(r'map/$', views.locations, name='map'),
url(r'opensearch.xml$', views.opensearch, name='opensearch'),
url(r'search/$', views.SearchView.as_view(), name='search'),
url(r'search.json$', api.search_json, name="search-json"),
url(r'stats.json/$', api.stats_json, name='stats-json'),
url(r'stats/$', views.stats, name='stats'),
url(r'(?P<name_id>.*).json$', api.name_json, name='detail-json'),
url(r'(?P<name_id>.*).mads.xml$', views.mads_serialize, name='mads-serialize'),
url(r'(?P<name_id>[^/]+)/', views.detail, name='detail')
]
Remove trailing slashes from JSON endpoints.
|
from django.conf.urls import url
from django.contrib import admin
from . import views, feeds
from .api import views as api
admin.autodiscover()
app_name = 'name'
urlpatterns = [
url(r'^$', views.landing, name='landing'),
url(r'about/$', views.about, name='about'),
url(r'export/$', views.export, name='export'),
url(r'feed/$', feeds.NameAtomFeed(), name='feed'),
url(r'label/(?P<name_value>.*)$', views.label, name='label'),
url(r'locations.json$', api.locations_json, name='locations-json'),
url(r'map/$', views.locations, name='map'),
url(r'opensearch.xml$', views.opensearch, name='opensearch'),
url(r'search/$', views.SearchView.as_view(), name='search'),
url(r'search.json$', api.search_json, name="search-json"),
url(r'stats.json$', api.stats_json, name='stats-json'),
url(r'stats/$', views.stats, name='stats'),
url(r'(?P<name_id>.*).json$', api.name_json, name='detail-json'),
url(r'(?P<name_id>.*).mads.xml$', views.mads_serialize, name='mads-serialize'),
url(r'(?P<name_id>[^/]+)/', views.detail, name='detail')
]
|
<commit_before>from django.conf.urls import url
from django.contrib import admin
from . import views, feeds
from .api import views as api
admin.autodiscover()
app_name = 'name'
urlpatterns = [
url(r'^$', views.landing, name='landing'),
url(r'about/$', views.about, name='about'),
url(r'export/$', views.export, name='export'),
url(r'feed/$', feeds.NameAtomFeed(), name='feed'),
url(r'label/(?P<name_value>.*)$', views.label, name='label'),
url(r'locations.json/$', api.locations_json, name='locations-json'),
url(r'map/$', views.locations, name='map'),
url(r'opensearch.xml$', views.opensearch, name='opensearch'),
url(r'search/$', views.SearchView.as_view(), name='search'),
url(r'search.json$', api.search_json, name="search-json"),
url(r'stats.json/$', api.stats_json, name='stats-json'),
url(r'stats/$', views.stats, name='stats'),
url(r'(?P<name_id>.*).json$', api.name_json, name='detail-json'),
url(r'(?P<name_id>.*).mads.xml$', views.mads_serialize, name='mads-serialize'),
url(r'(?P<name_id>[^/]+)/', views.detail, name='detail')
]
<commit_msg>Remove trailing slashes from JSON endpoints.<commit_after>
|
from django.conf.urls import url
from django.contrib import admin
from . import views, feeds
from .api import views as api
admin.autodiscover()
app_name = 'name'
urlpatterns = [
url(r'^$', views.landing, name='landing'),
url(r'about/$', views.about, name='about'),
url(r'export/$', views.export, name='export'),
url(r'feed/$', feeds.NameAtomFeed(), name='feed'),
url(r'label/(?P<name_value>.*)$', views.label, name='label'),
url(r'locations.json$', api.locations_json, name='locations-json'),
url(r'map/$', views.locations, name='map'),
url(r'opensearch.xml$', views.opensearch, name='opensearch'),
url(r'search/$', views.SearchView.as_view(), name='search'),
url(r'search.json$', api.search_json, name="search-json"),
url(r'stats.json$', api.stats_json, name='stats-json'),
url(r'stats/$', views.stats, name='stats'),
url(r'(?P<name_id>.*).json$', api.name_json, name='detail-json'),
url(r'(?P<name_id>.*).mads.xml$', views.mads_serialize, name='mads-serialize'),
url(r'(?P<name_id>[^/]+)/', views.detail, name='detail')
]
|
from django.conf.urls import url
from django.contrib import admin
from . import views, feeds
from .api import views as api
admin.autodiscover()
app_name = 'name'
urlpatterns = [
url(r'^$', views.landing, name='landing'),
url(r'about/$', views.about, name='about'),
url(r'export/$', views.export, name='export'),
url(r'feed/$', feeds.NameAtomFeed(), name='feed'),
url(r'label/(?P<name_value>.*)$', views.label, name='label'),
url(r'locations.json/$', api.locations_json, name='locations-json'),
url(r'map/$', views.locations, name='map'),
url(r'opensearch.xml$', views.opensearch, name='opensearch'),
url(r'search/$', views.SearchView.as_view(), name='search'),
url(r'search.json$', api.search_json, name="search-json"),
url(r'stats.json/$', api.stats_json, name='stats-json'),
url(r'stats/$', views.stats, name='stats'),
url(r'(?P<name_id>.*).json$', api.name_json, name='detail-json'),
url(r'(?P<name_id>.*).mads.xml$', views.mads_serialize, name='mads-serialize'),
url(r'(?P<name_id>[^/]+)/', views.detail, name='detail')
]
Remove trailing slashes from JSON endpoints.from django.conf.urls import url
from django.contrib import admin
from . import views, feeds
from .api import views as api
admin.autodiscover()
app_name = 'name'
urlpatterns = [
url(r'^$', views.landing, name='landing'),
url(r'about/$', views.about, name='about'),
url(r'export/$', views.export, name='export'),
url(r'feed/$', feeds.NameAtomFeed(), name='feed'),
url(r'label/(?P<name_value>.*)$', views.label, name='label'),
url(r'locations.json$', api.locations_json, name='locations-json'),
url(r'map/$', views.locations, name='map'),
url(r'opensearch.xml$', views.opensearch, name='opensearch'),
url(r'search/$', views.SearchView.as_view(), name='search'),
url(r'search.json$', api.search_json, name="search-json"),
url(r'stats.json$', api.stats_json, name='stats-json'),
url(r'stats/$', views.stats, name='stats'),
url(r'(?P<name_id>.*).json$', api.name_json, name='detail-json'),
url(r'(?P<name_id>.*).mads.xml$', views.mads_serialize, name='mads-serialize'),
url(r'(?P<name_id>[^/]+)/', views.detail, name='detail')
]
|
<commit_before>from django.conf.urls import url
from django.contrib import admin
from . import views, feeds
from .api import views as api
admin.autodiscover()
app_name = 'name'
urlpatterns = [
url(r'^$', views.landing, name='landing'),
url(r'about/$', views.about, name='about'),
url(r'export/$', views.export, name='export'),
url(r'feed/$', feeds.NameAtomFeed(), name='feed'),
url(r'label/(?P<name_value>.*)$', views.label, name='label'),
url(r'locations.json/$', api.locations_json, name='locations-json'),
url(r'map/$', views.locations, name='map'),
url(r'opensearch.xml$', views.opensearch, name='opensearch'),
url(r'search/$', views.SearchView.as_view(), name='search'),
url(r'search.json$', api.search_json, name="search-json"),
url(r'stats.json/$', api.stats_json, name='stats-json'),
url(r'stats/$', views.stats, name='stats'),
url(r'(?P<name_id>.*).json$', api.name_json, name='detail-json'),
url(r'(?P<name_id>.*).mads.xml$', views.mads_serialize, name='mads-serialize'),
url(r'(?P<name_id>[^/]+)/', views.detail, name='detail')
]
<commit_msg>Remove trailing slashes from JSON endpoints.<commit_after>from django.conf.urls import url
from django.contrib import admin
from . import views, feeds
from .api import views as api
admin.autodiscover()
app_name = 'name'
urlpatterns = [
url(r'^$', views.landing, name='landing'),
url(r'about/$', views.about, name='about'),
url(r'export/$', views.export, name='export'),
url(r'feed/$', feeds.NameAtomFeed(), name='feed'),
url(r'label/(?P<name_value>.*)$', views.label, name='label'),
url(r'locations.json$', api.locations_json, name='locations-json'),
url(r'map/$', views.locations, name='map'),
url(r'opensearch.xml$', views.opensearch, name='opensearch'),
url(r'search/$', views.SearchView.as_view(), name='search'),
url(r'search.json$', api.search_json, name="search-json"),
url(r'stats.json$', api.stats_json, name='stats-json'),
url(r'stats/$', views.stats, name='stats'),
url(r'(?P<name_id>.*).json$', api.name_json, name='detail-json'),
url(r'(?P<name_id>.*).mads.xml$', views.mads_serialize, name='mads-serialize'),
url(r'(?P<name_id>[^/]+)/', views.detail, name='detail')
]
|
2ee763ae1e4564a57692cb7161f99daab4ae77b7
|
cookiecutter/main.py
|
cookiecutter/main.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
import argparse
import os
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone
def main():
""" Entry point for the package, as defined in setup.py. """
# Get command line input/output arguments
parser = argparse.ArgumentParser(
description='Create a project from a Cookiecutter project template.'
)
parser.add_argument(
'input_dir',
help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
)
args = parser.parse_args()
# If it's a git repo, clone and prompt
if args.input_dir.endswith('.git'):
repo_dir = git_clone(args.input_dir)
project_template = find_template(repo_dir)
os.chdir(repo_dir)
else:
project_template = args.input_dir
# Create project from local context and project template.
context = generate_context()
generate_files(
input_dir=project_template,
context=context
)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
import argparse
import os
from .cleanup import remove_repo
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone
def main():
""" Entry point for the package, as defined in setup.py. """
# Get command line input/output arguments
parser = argparse.ArgumentParser(
description='Create a project from a Cookiecutter project template.'
)
parser.add_argument(
'input_dir',
help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
)
args = parser.parse_args()
# If it's a git repo, clone and prompt
if args.input_dir.endswith('.git'):
got_repo_arg = True
repo_dir = git_clone(args.input_dir)
project_template = find_template(repo_dir)
os.chdir(repo_dir)
else:
project_template = args.input_dir
# Create project from local context and project template.
context = generate_context()
generate_files(
input_dir=project_template,
context=context
)
# Remove repo if Cookiecutter cloned it in the first place.
# Here the user just wants a project, not a project template.
if got_repo_arg:
generated_project = context['project']['repo_name']
remove_repo(repo_dir, generated_project)
if __name__ == '__main__':
main()
|
Clean up after cloned repo if needed. (partial checkin)
|
Clean up after cloned repo if needed. (partial checkin)
|
Python
|
bsd-3-clause
|
atlassian/cookiecutter,dajose/cookiecutter,luzfcb/cookiecutter,foodszhang/cookiecutter,Springerle/cookiecutter,willingc/cookiecutter,utek/cookiecutter,takeflight/cookiecutter,takeflight/cookiecutter,willingc/cookiecutter,stevepiercy/cookiecutter,vincentbernat/cookiecutter,cguardia/cookiecutter,0k/cookiecutter,nhomar/cookiecutter,janusnic/cookiecutter,benthomasson/cookiecutter,0k/cookiecutter,foodszhang/cookiecutter,kkujawinski/cookiecutter,ionelmc/cookiecutter,michaeljoseph/cookiecutter,sp1rs/cookiecutter,jhermann/cookiecutter,lucius-feng/cookiecutter,agconti/cookiecutter,michaeljoseph/cookiecutter,ramiroluz/cookiecutter,letolab/cookiecutter,hackebrot/cookiecutter,audreyr/cookiecutter,sp1rs/cookiecutter,kkujawinski/cookiecutter,vincentbernat/cookiecutter,agconti/cookiecutter,lucius-feng/cookiecutter,drgarcia1986/cookiecutter,drgarcia1986/cookiecutter,atlassian/cookiecutter,ionelmc/cookiecutter,lgp171188/cookiecutter,audreyr/cookiecutter,nhomar/cookiecutter,alex/cookiecutter,cichm/cookiecutter,moi65/cookiecutter,lgp171188/cookiecutter,hackebrot/cookiecutter,tylerdave/cookiecutter,terryjbates/cookiecutter,stevepiercy/cookiecutter,janusnic/cookiecutter,utek/cookiecutter,moi65/cookiecutter,christabor/cookiecutter,vintasoftware/cookiecutter,alex/cookiecutter,cichm/cookiecutter,tylerdave/cookiecutter,letolab/cookiecutter,Vauxoo/cookiecutter,Springerle/cookiecutter,jhermann/cookiecutter,vintasoftware/cookiecutter,cguardia/cookiecutter,Vauxoo/cookiecutter,benthomasson/cookiecutter,dajose/cookiecutter,pjbull/cookiecutter,christabor/cookiecutter,venumech/cookiecutter,luzfcb/cookiecutter,terryjbates/cookiecutter,pjbull/cookiecutter,ramiroluz/cookiecutter,venumech/cookiecutter
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
import argparse
import os
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone
def main():
""" Entry point for the package, as defined in setup.py. """
# Get command line input/output arguments
parser = argparse.ArgumentParser(
description='Create a project from a Cookiecutter project template.'
)
parser.add_argument(
'input_dir',
help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
)
args = parser.parse_args()
# If it's a git repo, clone and prompt
if args.input_dir.endswith('.git'):
repo_dir = git_clone(args.input_dir)
project_template = find_template(repo_dir)
os.chdir(repo_dir)
else:
project_template = args.input_dir
# Create project from local context and project template.
context = generate_context()
generate_files(
input_dir=project_template,
context=context
)
if __name__ == '__main__':
main()
Clean up after cloned repo if needed. (partial checkin)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
import argparse
import os
from .cleanup import remove_repo
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone
def main():
""" Entry point for the package, as defined in setup.py. """
# Get command line input/output arguments
parser = argparse.ArgumentParser(
description='Create a project from a Cookiecutter project template.'
)
parser.add_argument(
'input_dir',
help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
)
args = parser.parse_args()
# If it's a git repo, clone and prompt
if args.input_dir.endswith('.git'):
got_repo_arg = True
repo_dir = git_clone(args.input_dir)
project_template = find_template(repo_dir)
os.chdir(repo_dir)
else:
project_template = args.input_dir
# Create project from local context and project template.
context = generate_context()
generate_files(
input_dir=project_template,
context=context
)
# Remove repo if Cookiecutter cloned it in the first place.
# Here the user just wants a project, not a project template.
if got_repo_arg:
generated_project = context['project']['repo_name']
remove_repo(repo_dir, generated_project)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
import argparse
import os
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone
def main():
""" Entry point for the package, as defined in setup.py. """
# Get command line input/output arguments
parser = argparse.ArgumentParser(
description='Create a project from a Cookiecutter project template.'
)
parser.add_argument(
'input_dir',
help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
)
args = parser.parse_args()
# If it's a git repo, clone and prompt
if args.input_dir.endswith('.git'):
repo_dir = git_clone(args.input_dir)
project_template = find_template(repo_dir)
os.chdir(repo_dir)
else:
project_template = args.input_dir
# Create project from local context and project template.
context = generate_context()
generate_files(
input_dir=project_template,
context=context
)
if __name__ == '__main__':
main()
<commit_msg>Clean up after cloned repo if needed. (partial checkin)<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
import argparse
import os
from .cleanup import remove_repo
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone
def main():
""" Entry point for the package, as defined in setup.py. """
# Get command line input/output arguments
parser = argparse.ArgumentParser(
description='Create a project from a Cookiecutter project template.'
)
parser.add_argument(
'input_dir',
help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
)
args = parser.parse_args()
# If it's a git repo, clone and prompt
if args.input_dir.endswith('.git'):
got_repo_arg = True
repo_dir = git_clone(args.input_dir)
project_template = find_template(repo_dir)
os.chdir(repo_dir)
else:
project_template = args.input_dir
# Create project from local context and project template.
context = generate_context()
generate_files(
input_dir=project_template,
context=context
)
# Remove repo if Cookiecutter cloned it in the first place.
# Here the user just wants a project, not a project template.
if got_repo_arg:
generated_project = context['project']['repo_name']
remove_repo(repo_dir, generated_project)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
import argparse
import os
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone
def main():
""" Entry point for the package, as defined in setup.py. """
# Get command line input/output arguments
parser = argparse.ArgumentParser(
description='Create a project from a Cookiecutter project template.'
)
parser.add_argument(
'input_dir',
help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
)
args = parser.parse_args()
# If it's a git repo, clone and prompt
if args.input_dir.endswith('.git'):
repo_dir = git_clone(args.input_dir)
project_template = find_template(repo_dir)
os.chdir(repo_dir)
else:
project_template = args.input_dir
# Create project from local context and project template.
context = generate_context()
generate_files(
input_dir=project_template,
context=context
)
if __name__ == '__main__':
main()
Clean up after cloned repo if needed. (partial checkin)#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
import argparse
import os
from .cleanup import remove_repo
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone
def main():
""" Entry point for the package, as defined in setup.py. """
# Get command line input/output arguments
parser = argparse.ArgumentParser(
description='Create a project from a Cookiecutter project template.'
)
parser.add_argument(
'input_dir',
help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
)
args = parser.parse_args()
# If it's a git repo, clone and prompt
if args.input_dir.endswith('.git'):
got_repo_arg = True
repo_dir = git_clone(args.input_dir)
project_template = find_template(repo_dir)
os.chdir(repo_dir)
else:
project_template = args.input_dir
# Create project from local context and project template.
context = generate_context()
generate_files(
input_dir=project_template,
context=context
)
# Remove repo if Cookiecutter cloned it in the first place.
# Here the user just wants a project, not a project template.
if got_repo_arg:
generated_project = context['project']['repo_name']
remove_repo(repo_dir, generated_project)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
import argparse
import os
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone
def main():
""" Entry point for the package, as defined in setup.py. """
# Get command line input/output arguments
parser = argparse.ArgumentParser(
description='Create a project from a Cookiecutter project template.'
)
parser.add_argument(
'input_dir',
help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
)
args = parser.parse_args()
# If it's a git repo, clone and prompt
if args.input_dir.endswith('.git'):
repo_dir = git_clone(args.input_dir)
project_template = find_template(repo_dir)
os.chdir(repo_dir)
else:
project_template = args.input_dir
# Create project from local context and project template.
context = generate_context()
generate_files(
input_dir=project_template,
context=context
)
if __name__ == '__main__':
main()
<commit_msg>Clean up after cloned repo if needed. (partial checkin)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.main
-----------------
Main entry point for the `cookiecutter` command.
The code in this module is also a good example of how to use Cookiecutter as a
library rather than a script.
"""
import argparse
import os
from .cleanup import remove_repo
from .find import find_template
from .generate import generate_context, generate_files
from .vcs import git_clone
def main():
""" Entry point for the package, as defined in setup.py. """
# Get command line input/output arguments
parser = argparse.ArgumentParser(
description='Create a project from a Cookiecutter project template.'
)
parser.add_argument(
'input_dir',
help='Cookiecutter project template dir, e.g. {{project.repo_name}}/'
)
args = parser.parse_args()
# If it's a git repo, clone and prompt
if args.input_dir.endswith('.git'):
got_repo_arg = True
repo_dir = git_clone(args.input_dir)
project_template = find_template(repo_dir)
os.chdir(repo_dir)
else:
project_template = args.input_dir
# Create project from local context and project template.
context = generate_context()
generate_files(
input_dir=project_template,
context=context
)
# Remove repo if Cookiecutter cloned it in the first place.
# Here the user just wants a project, not a project template.
if got_repo_arg:
generated_project = context['project']['repo_name']
remove_repo(repo_dir, generated_project)
if __name__ == '__main__':
main()
|
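Aside: as committed, the partial check-in above only assigns got_repo_arg inside the '.git' branch, so the later 'if got_repo_arg:' check would raise UnboundLocalError when a local template directory is passed. An illustrative fix (not part of the recorded commit; args, git_clone and find_template are the names from main() above) is to initialise the flag before branching:

# Sketch of the relevant part of main(), with the flag defined on both paths.
got_repo_arg = False
if args.input_dir.endswith('.git'):
    got_repo_arg = True
    repo_dir = git_clone(args.input_dir)
    project_template = find_template(repo_dir)
    os.chdir(repo_dir)
else:
    project_template = args.input_dir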
5dcd5da3674294388c068a55942e8974eb7aa75a
|
bh_sshRcmd.py
|
bh_sshRcmd.py
|
#!/usr/bin/env python
#SSH with Paramiko pg 27
import threading, paramiko, subprocess
def usage(): #Provide description of program
print "Black Hat Python SSH with Paramiko pg 27"
print ""
print "Enter Syntax or information for how program works"
print ""
sys.exit(0)
def main()
if not len(sys.argv[1:]):
usage()
def ssh_command(ip, user, passwd, command):
client = paramiko.SSHClient()
#client.load_host_keys('/home/justin/.ssh/known_hosts')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(ip, username= user, password= passwd)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print ssh_session.recv(1024) #read banner
while True:
command = ssh_session.recv(1024) #get the command from the SSH server
try:
cmd_output = subprocess.check_output(command, shell=True)
ssh_session.send(cmd_output)
except Exception,e:
ssh_session.send(str(e))
client.close()
return
ssh_command('192.168.1.59', 'justin', 'lovesthepython','ClientConnected')
main()
|
#!/usr/bin/env python
#SSH with Paramiko pg 27
import threading, paramiko, subprocess
def usage(): #Provide description of program
print "Black Hat Python SSH with Paramiko pg 27"
print ""
print "Enter Syntax or information for how program works"
print ""
sys.exit(0)
def main()
if not len(sys.argv[1:]):
usage()
def ssh_command(ip, user, passwd, command):
client = paramiko.SSHClient()
#client.load_host_keys('/home/justin/.ssh/known_hosts')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(ip, username= user, password= passwd)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print ssh_session.recv(1024) #read banner
while True:
command = ssh_session.recv(1024) #get the command from the SSH server
try:
cmd_output = subprocess.check_output(command, shell=True)
ssh_session.send(cmd_output)
except Exception,e:
ssh_session.send(str(e))
client.close()
return
ssh_command('192.168.1.59', 'justin', 'lovesthepython','ClientConnected')
main()
|
FIX FUNCTIONS. CHANGE USAGE FUNCTION TO BANNER. ADD ACTUAL USAGE FILE. FIX 'main' FUNCTION
|
TODO: FIX FUNCTIONS. CHANGE USAGE FUNCTION TO BANNER. ADD ACTUAL USAGE FILE. FIX 'main' FUNCTION
|
Python
|
mit
|
n1cfury/BlackHatPython
|
#!/usr/bin/env python
#SSH with Paramiko pg 27
import threading, paramiko, subprocess
def usage(): #Provide description of program
print "Black Hat Python SSH with Paramiko pg 27"
print ""
print "Enter Syntax or information for how program works"
print ""
sys.exit(0)
def main()
if not len(sys.argv[1:]):
usage()
def ssh_command(ip, user, passwd, command):
client = paramiko.SSHClient()
#client.load_host_keys('/home/justin/.ssh/known_hosts')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(ip, username= user, password= passwd)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print ssh_session.recv(1024) #read banner
while True:
command = ssh_session.recv(1024) #get the command from the SSH server
try:
cmd_output = subprocess.check_output(command, shell=True)
ssh_session.send(cmd_output)
except Exception,e:
ssh_session.send(str(e))
client.close()
return
ssh_command('192.168.1.59', 'justin', 'lovesthepython','ClientConnected')
main()TODO: FIX FUNCTIONS. CHANGE USAGE FUNCTION TO BANNER. ADD ACTUAL USAGE FILE. FIX 'main' FUNCTION
|
#!/usr/bin/env python
#SSH with Paramiko pg 27
import threading, paramiko, subprocess
def usage(): #Provide description of program
print "Black Hat Python SSH with Paramiko pg 27"
print ""
print "Enter Syntax or information for how program works"
print ""
sys.exit(0)
def main()
if not len(sys.argv[1:]):
usage()
def ssh_command(ip, user, passwd, command):
client = paramiko.SSHClient()
#client.load_host_keys('/home/justin/.ssh/known_hosts')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(ip, username= user, password= passwd)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print ssh_session.recv(1024) #read banner
while True:
command = ssh_session.recv(1024) #get the command from the SSH server
try:
cmd_output = subprocess.check_output(command, shell=True)
ssh_session.send(cmd_output)
except Exception,e:
ssh_session.send(str(e))
client.close()
return
ssh_command('192.168.1.59', 'justin', 'lovesthepython','ClientConnected')
main()
|
<commit_before>#!/usr/bin/env python
#SSH with Paramiko pg 27
import threading, paramiko, subprocess
def usage(): #Provide description of program
print "Black Hat Python SSH with Paramiko pg 27"
print ""
print "Enter Syntax or information for how program works"
print ""
sys.exit(0)
def main()
if not len(sys.argv[1:]):
usage()
def ssh_command(ip, user, passwd, command):
client = paramiko.SSHClient()
#client.load_host_keys('/home/justin/.ssh/known_hosts')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(ip, username= user, password= passwd)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print ssh_session.recv(1024) #read banner
while True:
command = ssh_session.recv(1024) #get the command from the SSH server
try:
cmd_output = subprocess.check_output(command, shell=True)
ssh_session.send(cmd_output)
except Exception,e:
ssh_session.send(str(e))
client.close()
return
ssh_command('192.168.1.59', 'justin', 'lovesthepython','ClientConnected')
main()<commit_msg>TODO: FIX FUNCTIONS. CHANGE USAGE FUNCTION TO BANNER. ADD ACTUAL USAGE FILE. FIX 'main' FUNCTION<commit_after>
|
#!/usr/bin/env python
#SSH with Paramiko pg 27
import threading, paramiko, subprocess
def usage(): #Provide description of program
print "Black Hat Python SSH with Paramiko pg 27"
print ""
print "Enter Syntax or information for how program works"
print ""
sys.exit(0)
def main()
if not len(sys.argv[1:]):
usage()
def ssh_command(ip, user, passwd, command):
client = paramiko.SSHClient()
#client.load_host_keys('/home/justin/.ssh/known_hosts')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(ip, username= user, password= passwd)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print ssh_session.recv(1024) #read banner
while True:
command = ssh_session.recv(1024) #get the command from the SSH server
try:
cmd_output = subprocess.check_output(command, shell=True)
ssh_session.send(cmd_output)
except Exception,e:
ssh_session.send(str(e))
client.close()
return
ssh_command('192.168.1.59', 'justin', 'lovesthepython','ClientConnected')
main()
|
#!/usr/bin/env python
#SSH with Paramiko pg 27
import threading, paramiko, subprocess
def usage(): #Provide description of program
print "Black Hat Python SSH with Paramiko pg 27"
print ""
print "Enter Syntax or information for how program works"
print ""
sys.exit(0)
def main()
if not len(sys.argv[1:]):
usage()
def ssh_command(ip, user, passwd, command):
client = paramiko.SSHClient()
#client.load_host_keys('/home/justin/.ssh/known_hosts')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(ip, username= user, password= passwd)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print ssh_session.recv(1024) #read banner
while True:
command = ssh_session.recv(1024) #get the command from the SSH server
try:
cmd_output = subprocess.check_output(command, shell=True)
ssh_session.send(cmd_output)
except Exception,e:
ssh_session.send(str(e))
client.close()
return
ssh_command('192.168.1.59', 'justin', 'lovesthepython','ClientConnected')
main()TODO: FIX FUNCTIONS. CHANGE USAGE FUNCTION TO BANNER. ADD ACTUAL USAGE FILE. FIX 'main' FUNCTION#!/usr/bin/env python
#SSH with Paramiko pg 27
import threading, paramiko, subprocess
def usage(): #Provide description of program
print "Black Hat Python SSH with Paramiko pg 27"
print ""
print "Enter Syntax or information for how program works"
print ""
sys.exit(0)
def main()
if not len(sys.argv[1:]):
usage()
def ssh_command(ip, user, passwd, command):
client = paramiko.SSHClient()
#client.load_host_keys('/home/justin/.ssh/known_hosts')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(ip, username= user, password= passwd)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print ssh_session.recv(1024) #read banner
while True:
command = ssh_session.recv(1024) #get the command from the SSH server
try:
cmd_output = subprocess.check_output(command, shell=True)
ssh_session.send(cmd_output)
except Exception,e:
ssh_session.send(str(e))
client.close()
return
ssh_command('192.168.1.59', 'justin', 'lovesthepython','ClientConnected')
main()
|
<commit_before>#!/usr/bin/env python
#SSH with Paramiko pg 27
import threading, paramiko, subprocess
def usage(): #Provide description of program
print "Black Hat Python SSH with Paramiko pg 27"
print ""
print "Enter Syntax or information for how program works"
print ""
sys.exit(0)
def main()
if not len(sys.argv[1:]):
usage()
def ssh_command(ip, user, passwd, command):
client = paramiko.SSHClient()
#client.load_host_keys('/home/justin/.ssh/known_hosts')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(ip, username= user, password= passwd)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print ssh_session.recv(1024) #read banner
while True:
command = ssh_session.recv(1024) #get the command from the SSH server
try:
cmd_output = subprocess.check_output(command, shell=True)
ssh_session.send(cmd_output)
except Exception,e:
ssh_session.send(str(e))
client.close()
return
ssh_command('192.168.1.59', 'justin', 'lovesthepython','ClientConnected')
main()<commit_msg>TODO: FIX FUNCTIONS. CHANGE USAGE FUNCTION TO BANNER. ADD ACTUAL USAGE FILE. FIX 'main' FUNCTION<commit_after>#!/usr/bin/env python
#SSH with Paramiko pg 27
import threading, paramiko, subprocess
def usage(): #Provide description of program
print "Black Hat Python SSH with Paramiko pg 27"
print ""
print "Enter Syntax or information for how program works"
print ""
sys.exit(0)
def main()
if not len(sys.argv[1:]):
usage()
def ssh_command(ip, user, passwd, command):
client = paramiko.SSHClient()
#client.load_host_keys('/home/justin/.ssh/known_hosts')
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(ip, username= user, password= passwd)
ssh_session = client.get_transport().open_session()
if ssh_session.active:
ssh_session.exec_command(command)
print ssh_session.recv(1024) #read banner
while True:
command = ssh_session.recv(1024) #get the command from the SSH server
try:
cmd_output = subprocess.check_output(command, shell=True)
ssh_session.send(cmd_output)
except Exception,e:
ssh_session.send(str(e))
client.close()
return
ssh_command('192.168.1.59', 'justin', 'lovesthepython','ClientConnected')
main()
|
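Aside: the commit above only records a TODO, and the stored script has the problems it acknowledges (missing colon on 'def main()', missing 'import sys', Python 2 print/except syntax). A minimal corrected sketch of the same client, written for Python 3 and using only standard paramiko calls, might look like this; it is illustrative only, and the host and credentials remain placeholders as in the record:

#!/usr/bin/env python3
# Illustrative rewrite of bh_sshRcmd.py; not part of the recorded commit.
import subprocess
import sys

import paramiko


def ssh_command(ip, user, passwd, command):
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(ip, username=user, password=passwd)
    ssh_session = client.get_transport().open_session()
    if ssh_session.active:
        ssh_session.exec_command(command)
        print(ssh_session.recv(1024).decode(errors='replace'))  # read banner
        while True:
            command = ssh_session.recv(1024)  # next command from the SSH server
            try:
                cmd_output = subprocess.check_output(command, shell=True)
                ssh_session.send(cmd_output)
            except Exception as e:  # Python 3 exception syntax
                ssh_session.send(str(e).encode())
    client.close()


if __name__ == '__main__':
    if len(sys.argv) != 5:
        sys.exit('usage: bh_sshRcmd.py <ip> <user> <password> <command>')
    ssh_command(sys.argv[1], sys.argv[2], sys.argv[3], sys.argv[4])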
cb0b742ad05adceb6b58a71ca5d1e33985145a54
|
servicerating/urls.py
|
servicerating/urls.py
|
from django.conf.urls import patterns, url, include
from servicerating import api
from tastypie.api import Api
# Setting the API base name and registering the API resources using
# Tastypies API function
api_resources = Api(api_name='v1/servicerating')
api_resources.register(api.ContactResource())
api_resources.register(api.ConversationResource())
api_resources.register(api.ResponseResource())
api_resources.register(api.UserAccountResource())
api_resources.register(api.ExtraResource())
api_resources.register(api.ServiceRatingResource())
api_resources.prepend_urls()
# Setting the urlpatterns to hook into the api urls
urlpatterns = patterns('',
url(r'^api/', include(api_resources.urls))
)
|
from django.conf.urls import patterns, url, include
from servicerating import api
from tastypie.api import Api
# Setting the API base name and registering the API resources using
# Tastypies API function
api_resources = Api(api_name='v1/servicerating')
api_resources.register(api.ContactResource())
api_resources.register(api.ConversationResource())
api_resources.register(api.ResponseResource())
api_resources.register(api.UserAccountResource())
api_resources.register(api.ExtraResource())
api_resources.register(api.ServiceRatingResource())
api_resources.prepend_urls()
# Setting the urlpatterns to hook into the api urls
urlpatterns = patterns('',
url(r'^api/', include(api_resources.urls))
)
|
Add missing end of file line break
|
Add missing end of file line break
|
Python
|
bsd-3-clause
|
praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control
|
from django.conf.urls import patterns, url, include
from servicerating import api
from tastypie.api import Api
# Setting the API base name and registering the API resources using
# Tastypies API function
api_resources = Api(api_name='v1/servicerating')
api_resources.register(api.ContactResource())
api_resources.register(api.ConversationResource())
api_resources.register(api.ResponseResource())
api_resources.register(api.UserAccountResource())
api_resources.register(api.ExtraResource())
api_resources.register(api.ServiceRatingResource())
api_resources.prepend_urls()
# Setting the urlpatterns to hook into the api urls
urlpatterns = patterns('',
url(r'^api/', include(api_resources.urls))
)Add missing end of file line break
|
from django.conf.urls import patterns, url, include
from servicerating import api
from tastypie.api import Api
# Setting the API base name and registering the API resources using
# Tastypies API function
api_resources = Api(api_name='v1/servicerating')
api_resources.register(api.ContactResource())
api_resources.register(api.ConversationResource())
api_resources.register(api.ResponseResource())
api_resources.register(api.UserAccountResource())
api_resources.register(api.ExtraResource())
api_resources.register(api.ServiceRatingResource())
api_resources.prepend_urls()
# Setting the urlpatterns to hook into the api urls
urlpatterns = patterns('',
url(r'^api/', include(api_resources.urls))
)
|
<commit_before>from django.conf.urls import patterns, url, include
from servicerating import api
from tastypie.api import Api
# Setting the API base name and registering the API resources using
# Tastypies API function
api_resources = Api(api_name='v1/servicerating')
api_resources.register(api.ContactResource())
api_resources.register(api.ConversationResource())
api_resources.register(api.ResponseResource())
api_resources.register(api.UserAccountResource())
api_resources.register(api.ExtraResource())
api_resources.register(api.ServiceRatingResource())
api_resources.prepend_urls()
# Setting the urlpatterns to hook into the api urls
urlpatterns = patterns('',
url(r'^api/', include(api_resources.urls))
)<commit_msg>Add missing end of file line break<commit_after>
|
from django.conf.urls import patterns, url, include
from servicerating import api
from tastypie.api import Api
# Setting the API base name and registering the API resources using
# Tastypies API function
api_resources = Api(api_name='v1/servicerating')
api_resources.register(api.ContactResource())
api_resources.register(api.ConversationResource())
api_resources.register(api.ResponseResource())
api_resources.register(api.UserAccountResource())
api_resources.register(api.ExtraResource())
api_resources.register(api.ServiceRatingResource())
api_resources.prepend_urls()
# Setting the urlpatterns to hook into the api urls
urlpatterns = patterns('',
url(r'^api/', include(api_resources.urls))
)
|
from django.conf.urls import patterns, url, include
from servicerating import api
from tastypie.api import Api
# Setting the API base name and registering the API resources using
# Tastypies API function
api_resources = Api(api_name='v1/servicerating')
api_resources.register(api.ContactResource())
api_resources.register(api.ConversationResource())
api_resources.register(api.ResponseResource())
api_resources.register(api.UserAccountResource())
api_resources.register(api.ExtraResource())
api_resources.register(api.ServiceRatingResource())
api_resources.prepend_urls()
# Setting the urlpatterns to hook into the api urls
urlpatterns = patterns('',
url(r'^api/', include(api_resources.urls))
)Add missing end of file line breakfrom django.conf.urls import patterns, url, include
from servicerating import api
from tastypie.api import Api
# Setting the API base name and registering the API resources using
# Tastypies API function
api_resources = Api(api_name='v1/servicerating')
api_resources.register(api.ContactResource())
api_resources.register(api.ConversationResource())
api_resources.register(api.ResponseResource())
api_resources.register(api.UserAccountResource())
api_resources.register(api.ExtraResource())
api_resources.register(api.ServiceRatingResource())
api_resources.prepend_urls()
# Setting the urlpatterns to hook into the api urls
urlpatterns = patterns('',
url(r'^api/', include(api_resources.urls))
)
|
<commit_before>from django.conf.urls import patterns, url, include
from servicerating import api
from tastypie.api import Api
# Setting the API base name and registering the API resources using
# Tastypies API function
api_resources = Api(api_name='v1/servicerating')
api_resources.register(api.ContactResource())
api_resources.register(api.ConversationResource())
api_resources.register(api.ResponseResource())
api_resources.register(api.UserAccountResource())
api_resources.register(api.ExtraResource())
api_resources.register(api.ServiceRatingResource())
api_resources.prepend_urls()
# Setting the urlpatterns to hook into the api urls
urlpatterns = patterns('',
url(r'^api/', include(api_resources.urls))
)<commit_msg>Add missing end of file line break<commit_after>from django.conf.urls import patterns, url, include
from servicerating import api
from tastypie.api import Api
# Setting the API base name and registering the API resources using
# Tastypies API function
api_resources = Api(api_name='v1/servicerating')
api_resources.register(api.ContactResource())
api_resources.register(api.ConversationResource())
api_resources.register(api.ResponseResource())
api_resources.register(api.UserAccountResource())
api_resources.register(api.ExtraResource())
api_resources.register(api.ServiceRatingResource())
api_resources.prepend_urls()
# Setting the urlpatterns to hook into the api urls
urlpatterns = patterns('',
url(r'^api/', include(api_resources.urls))
)
|
8be5a5bcbd228599ce7a4f226638feb3dc3318a8
|
python/examples/encode_message.py
|
python/examples/encode_message.py
|
#!/usr/bin/env python3
import os
import sys
# Add the Python root directory (fusion-engine-client/python/) to the import search path to enable FusionEngine imports
# if this application is being run directly out of the repository and is not installed as a pip package.
root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, root_dir)
from fusion_engine_client.messages import *
from fusion_engine_client.parsers import FusionEngineEncoder
from fusion_engine_client.utils.argument_parser import ArgumentParser
from fusion_engine_client.utils.bin_utils import bytes_to_hex
if __name__ == "__main__":
parser = ArgumentParser(description="""\
Encode a FusionEngine message and print the resulting binary content to the
console.
""")
options = parser.parse_args()
# Enable FusionEngine PoseMessage output on UART1
message = SetMessageRate(output_interface=InterfaceID(TransportType.SERIAL, 1),
protocol=ProtocolType.FUSION_ENGINE,
message_id=MessageType.POSE,
rate=MessageRate.ON_CHANGE)
encoder = FusionEngineEncoder()
encoded_data = encoder.encode_message(message)
print(bytes_to_hex(encoded_data, bytes_per_row=16, bytes_per_col=2))
|
#!/usr/bin/env python3
import os
import sys
# Add the Python root directory (fusion-engine-client/python/) to the import search path to enable FusionEngine imports
# if this application is being run directly out of the repository and is not installed as a pip package.
root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, root_dir)
from fusion_engine_client.messages import *
from fusion_engine_client.parsers import FusionEngineEncoder
from fusion_engine_client.utils.argument_parser import ArgumentParser
from fusion_engine_client.utils.bin_utils import bytes_to_hex
if __name__ == "__main__":
parser = ArgumentParser(description="""\
Encode a FusionEngine message and print the resulting binary content to the
console.
""")
options = parser.parse_args()
# Enable FusionEngine PoseMessage output on UART1
message = SetMessageRate(output_interface=InterfaceID(TransportType.SERIAL, 1),
protocol=ProtocolType.FUSION_ENGINE,
message_id=MessageType.POSE,
rate=MessageRate.ON_CHANGE)
print(message)
encoder = FusionEngineEncoder()
encoded_data = encoder.encode_message(message)
print('')
print(bytes_to_hex(encoded_data, bytes_per_row=16, bytes_per_col=2))
|
Print the message in the encode example.
|
Print the message in the encode example.
|
Python
|
mit
|
PointOneNav/fusion-engine-client,PointOneNav/fusion-engine-client,PointOneNav/fusion-engine-client
|
#!/usr/bin/env python3
import os
import sys
# Add the Python root directory (fusion-engine-client/python/) to the import search path to enable FusionEngine imports
# if this application is being run directly out of the repository and is not installed as a pip package.
root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, root_dir)
from fusion_engine_client.messages import *
from fusion_engine_client.parsers import FusionEngineEncoder
from fusion_engine_client.utils.argument_parser import ArgumentParser
from fusion_engine_client.utils.bin_utils import bytes_to_hex
if __name__ == "__main__":
parser = ArgumentParser(description="""\
Encode a FusionEngine message and print the resulting binary content to the
console.
""")
options = parser.parse_args()
# Enable FusionEngine PoseMessage output on UART1
message = SetMessageRate(output_interface=InterfaceID(TransportType.SERIAL, 1),
protocol=ProtocolType.FUSION_ENGINE,
message_id=MessageType.POSE,
rate=MessageRate.ON_CHANGE)
encoder = FusionEngineEncoder()
encoded_data = encoder.encode_message(message)
print(bytes_to_hex(encoded_data, bytes_per_row=16, bytes_per_col=2))
Print the message in the encode example.
|
#!/usr/bin/env python3
import os
import sys
# Add the Python root directory (fusion-engine-client/python/) to the import search path to enable FusionEngine imports
# if this application is being run directly out of the repository and is not installed as a pip package.
root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, root_dir)
from fusion_engine_client.messages import *
from fusion_engine_client.parsers import FusionEngineEncoder
from fusion_engine_client.utils.argument_parser import ArgumentParser
from fusion_engine_client.utils.bin_utils import bytes_to_hex
if __name__ == "__main__":
parser = ArgumentParser(description="""\
Encode a FusionEngine message and print the resulting binary content to the
console.
""")
options = parser.parse_args()
# Enable FusionEngine PoseMessage output on UART1
message = SetMessageRate(output_interface=InterfaceID(TransportType.SERIAL, 1),
protocol=ProtocolType.FUSION_ENGINE,
message_id=MessageType.POSE,
rate=MessageRate.ON_CHANGE)
print(message)
encoder = FusionEngineEncoder()
encoded_data = encoder.encode_message(message)
print('')
print(bytes_to_hex(encoded_data, bytes_per_row=16, bytes_per_col=2))
|
<commit_before>#!/usr/bin/env python3
import os
import sys
# Add the Python root directory (fusion-engine-client/python/) to the import search path to enable FusionEngine imports
# if this application is being run directly out of the repository and is not installed as a pip package.
root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, root_dir)
from fusion_engine_client.messages import *
from fusion_engine_client.parsers import FusionEngineEncoder
from fusion_engine_client.utils.argument_parser import ArgumentParser
from fusion_engine_client.utils.bin_utils import bytes_to_hex
if __name__ == "__main__":
parser = ArgumentParser(description="""\
Encode a FusionEngine message and print the resulting binary content to the
console.
""")
options = parser.parse_args()
# Enable FusionEngine PoseMessage output on UART1
message = SetMessageRate(output_interface=InterfaceID(TransportType.SERIAL, 1),
protocol=ProtocolType.FUSION_ENGINE,
message_id=MessageType.POSE,
rate=MessageRate.ON_CHANGE)
encoder = FusionEngineEncoder()
encoded_data = encoder.encode_message(message)
print(bytes_to_hex(encoded_data, bytes_per_row=16, bytes_per_col=2))
<commit_msg>Print the message in the encode example.<commit_after>
|
#!/usr/bin/env python3
import os
import sys
# Add the Python root directory (fusion-engine-client/python/) to the import search path to enable FusionEngine imports
# if this application is being run directly out of the repository and is not installed as a pip package.
root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, root_dir)
from fusion_engine_client.messages import *
from fusion_engine_client.parsers import FusionEngineEncoder
from fusion_engine_client.utils.argument_parser import ArgumentParser
from fusion_engine_client.utils.bin_utils import bytes_to_hex
if __name__ == "__main__":
parser = ArgumentParser(description="""\
Encode a FusionEngine message and print the resulting binary content to the
console.
""")
options = parser.parse_args()
# Enable FusionEngine PoseMessage output on UART1
message = SetMessageRate(output_interface=InterfaceID(TransportType.SERIAL, 1),
protocol=ProtocolType.FUSION_ENGINE,
message_id=MessageType.POSE,
rate=MessageRate.ON_CHANGE)
print(message)
encoder = FusionEngineEncoder()
encoded_data = encoder.encode_message(message)
print('')
print(bytes_to_hex(encoded_data, bytes_per_row=16, bytes_per_col=2))
|
#!/usr/bin/env python3
import os
import sys
# Add the Python root directory (fusion-engine-client/python/) to the import search path to enable FusionEngine imports
# if this application is being run directly out of the repository and is not installed as a pip package.
root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, root_dir)
from fusion_engine_client.messages import *
from fusion_engine_client.parsers import FusionEngineEncoder
from fusion_engine_client.utils.argument_parser import ArgumentParser
from fusion_engine_client.utils.bin_utils import bytes_to_hex
if __name__ == "__main__":
parser = ArgumentParser(description="""\
Encode a FusionEngine message and print the resulting binary content to the
console.
""")
options = parser.parse_args()
# Enable FusionEngine PoseMessage output on UART1
message = SetMessageRate(output_interface=InterfaceID(TransportType.SERIAL, 1),
protocol=ProtocolType.FUSION_ENGINE,
message_id=MessageType.POSE,
rate=MessageRate.ON_CHANGE)
encoder = FusionEngineEncoder()
encoded_data = encoder.encode_message(message)
print(bytes_to_hex(encoded_data, bytes_per_row=16, bytes_per_col=2))
Print the message in the encode example.#!/usr/bin/env python3
import os
import sys
# Add the Python root directory (fusion-engine-client/python/) to the import search path to enable FusionEngine imports
# if this application is being run directly out of the repository and is not installed as a pip package.
root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, root_dir)
from fusion_engine_client.messages import *
from fusion_engine_client.parsers import FusionEngineEncoder
from fusion_engine_client.utils.argument_parser import ArgumentParser
from fusion_engine_client.utils.bin_utils import bytes_to_hex
if __name__ == "__main__":
parser = ArgumentParser(description="""\
Encode a FusionEngine message and print the resulting binary content to the
console.
""")
options = parser.parse_args()
# Enable FusionEngine PoseMessage output on UART1
message = SetMessageRate(output_interface=InterfaceID(TransportType.SERIAL, 1),
protocol=ProtocolType.FUSION_ENGINE,
message_id=MessageType.POSE,
rate=MessageRate.ON_CHANGE)
print(message)
encoder = FusionEngineEncoder()
encoded_data = encoder.encode_message(message)
print('')
print(bytes_to_hex(encoded_data, bytes_per_row=16, bytes_per_col=2))
|
<commit_before>#!/usr/bin/env python3
import os
import sys
# Add the Python root directory (fusion-engine-client/python/) to the import search path to enable FusionEngine imports
# if this application is being run directly out of the repository and is not installed as a pip package.
root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, root_dir)
from fusion_engine_client.messages import *
from fusion_engine_client.parsers import FusionEngineEncoder
from fusion_engine_client.utils.argument_parser import ArgumentParser
from fusion_engine_client.utils.bin_utils import bytes_to_hex
if __name__ == "__main__":
parser = ArgumentParser(description="""\
Encode a FusionEngine message and print the resulting binary content to the
console.
""")
options = parser.parse_args()
# Enable FusionEngine PoseMessage output on UART1
message = SetMessageRate(output_interface=InterfaceID(TransportType.SERIAL, 1),
protocol=ProtocolType.FUSION_ENGINE,
message_id=MessageType.POSE,
rate=MessageRate.ON_CHANGE)
encoder = FusionEngineEncoder()
encoded_data = encoder.encode_message(message)
print(bytes_to_hex(encoded_data, bytes_per_row=16, bytes_per_col=2))
<commit_msg>Print the message in the encode example.<commit_after>#!/usr/bin/env python3
import os
import sys
# Add the Python root directory (fusion-engine-client/python/) to the import search path to enable FusionEngine imports
# if this application is being run directly out of the repository and is not installed as a pip package.
root_dir = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
sys.path.insert(0, root_dir)
from fusion_engine_client.messages import *
from fusion_engine_client.parsers import FusionEngineEncoder
from fusion_engine_client.utils.argument_parser import ArgumentParser
from fusion_engine_client.utils.bin_utils import bytes_to_hex
if __name__ == "__main__":
parser = ArgumentParser(description="""\
Encode a FusionEngine message and print the resulting binary content to the
console.
""")
options = parser.parse_args()
# Enable FusionEngine PoseMessage output on UART1
message = SetMessageRate(output_interface=InterfaceID(TransportType.SERIAL, 1),
protocol=ProtocolType.FUSION_ENGINE,
message_id=MessageType.POSE,
rate=MessageRate.ON_CHANGE)
print(message)
encoder = FusionEngineEncoder()
encoded_data = encoder.encode_message(message)
print('')
print(bytes_to_hex(encoded_data, bytes_per_row=16, bytes_per_col=2))
|
14ebd4a0e570102e97fb65bbb0813d85e763c743
|
plyer/facades/notification.py
|
plyer/facades/notification.py
|
'''
Notification
===========
The :class:`Notification` provides access to public methods to create
notifications.
Simple Examples
---------------
To send notification::
>>> from plyer import notification
>>> title = 'plyer'
>>> message = 'This is an example.'
>>> notification.notify(title=title, message=message)
'''
class Notification(object):
'''
Notification facade.
'''
def notify(self, title='', message='', app_name='', app_icon='',
timeout=10, ticker=''):
'''
Send a notification.
:param title: Title of the notification
:param message: Message of the notification
:param app_name: Name of the app launching this notification
:param app_icon: Icon to be displayed along with the message
:param timeout: time to display the message for, defaults to 10
:param ticker: text to display on status bar as the notification
arrives
:type title: str
:type message: str
:type app_name: str
:type app_icon: str
:type timeout: int
:type ticker: str
'''
self._notify(title=title, message=message, app_icon=app_icon,
app_name=app_name, timeout=timeout, ticker=ticker)
# private
def _notify(self, **kwargs):
raise NotImplementedError("No usable implementation found!")
|
'''
Notification
===========
The :class:`Notification` provides access to public methods to create
notifications.
Simple Examples
---------------
To send notification::
>>> from plyer import notification
>>> title = 'plyer'
>>> message = 'This is an example.'
>>> notification.notify(title=title, message=message)
'''
class Notification(object):
'''
Notification facade.
'''
def notify(self, title='', message='', app_name='', app_icon='',
timeout=10, ticker=''):
'''
Send a notification.
:param title: Title of the notification
:param message: Message of the notification
:param app_name: Name of the app launching this notification
:param app_icon: Icon to be displayed along with the message
:param timeout: time to display the message for, defaults to 10
:param ticker: text to display on status bar as the notification
arrives
:type title: str
:type message: str
:type app_name: str
:type app_icon: str
:type timeout: int
:type ticker: str
.. note::
When called on Windows, ``app_icon`` has to be a path to
a file in .ICO format.
'''
self._notify(title=title, message=message, app_icon=app_icon,
app_name=app_name, timeout=timeout, ticker=ticker)
# private
def _notify(self, **kwargs):
raise NotImplementedError("No usable implementation found!")
|
Add note about Windows icon format
|
Add note about Windows icon format
|
Python
|
mit
|
KeyWeeUsr/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer,kivy/plyer,kivy/plyer
|
'''
Notification
===========
The :class:`Notification` provides access to public methods to create
notifications.
Simple Examples
---------------
To send notification::
>>> from plyer import notification
>>> title = 'plyer'
>>> message = 'This is an example.'
>>> notification.notify(title=title, message=message)
'''
class Notification(object):
'''
Notification facade.
'''
def notify(self, title='', message='', app_name='', app_icon='',
timeout=10, ticker=''):
'''
Send a notification.
:param title: Title of the notification
:param message: Message of the notification
:param app_name: Name of the app launching this notification
:param app_icon: Icon to be displayed along with the message
:param timeout: time to display the message for, defaults to 10
:param ticker: text to display on status bar as the notification
arrives
:type title: str
:type message: str
:type app_name: str
:type app_icon: str
:type timeout: int
:type ticker: str
'''
self._notify(title=title, message=message, app_icon=app_icon,
app_name=app_name, timeout=timeout, ticker=ticker)
# private
def _notify(self, **kwargs):
raise NotImplementedError("No usable implementation found!")
Add note about Windows icon format
|
'''
Notification
===========
The :class:`Notification` provides access to public methods to create
notifications.
Simple Examples
---------------
To send notification::
>>> from plyer import notification
>>> title = 'plyer'
>>> message = 'This is an example.'
>>> notification.notify(title=title, message=message)
'''
class Notification(object):
'''
Notification facade.
'''
def notify(self, title='', message='', app_name='', app_icon='',
timeout=10, ticker=''):
'''
Send a notification.
:param title: Title of the notification
:param message: Message of the notification
:param app_name: Name of the app launching this notification
:param app_icon: Icon to be displayed along with the message
:param timeout: time to display the message for, defaults to 10
:param ticker: text to display on status bar as the notification
arrives
:type title: str
:type message: str
:type app_name: str
:type app_icon: str
:type timeout: int
:type ticker: str
.. note::
When called on Windows, ``app_icon`` has to be a path to
a file in .ICO format.
'''
self._notify(title=title, message=message, app_icon=app_icon,
app_name=app_name, timeout=timeout, ticker=ticker)
# private
def _notify(self, **kwargs):
raise NotImplementedError("No usable implementation found!")
|
<commit_before>'''
Notification
===========
The :class:`Notification` provides access to public methods to create
notifications.
Simple Examples
---------------
To send notification::
>>> from plyer import notification
>>> title = 'plyer'
>>> message = 'This is an example.'
>>> notification.notify(title=title, message=message)
'''
class Notification(object):
'''
Notification facade.
'''
def notify(self, title='', message='', app_name='', app_icon='',
timeout=10, ticker=''):
'''
Send a notification.
:param title: Title of the notification
:param message: Message of the notification
:param app_name: Name of the app launching this notification
:param app_icon: Icon to be displayed along with the message
:param timeout: time to display the message for, defaults to 10
:param ticker: text to display on status bar as the notification
arrives
:type title: str
:type message: str
:type app_name: str
:type app_icon: str
:type timeout: int
:type ticker: str
'''
self._notify(title=title, message=message, app_icon=app_icon,
app_name=app_name, timeout=timeout, ticker=ticker)
# private
def _notify(self, **kwargs):
raise NotImplementedError("No usable implementation found!")
<commit_msg>Add note about Windows icon format<commit_after>
|
'''
Notification
===========
The :class:`Notification` provides access to public methods to create
notifications.
Simple Examples
---------------
To send notification::
>>> from plyer import notification
>>> title = 'plyer'
>>> message = 'This is an example.'
>>> notification.notify(title=title, message=message)
'''
class Notification(object):
'''
Notification facade.
'''
def notify(self, title='', message='', app_name='', app_icon='',
timeout=10, ticker=''):
'''
Send a notification.
:param title: Title of the notification
:param message: Message of the notification
:param app_name: Name of the app launching this notification
:param app_icon: Icon to be displayed along with the message
:param timeout: time to display the message for, defaults to 10
:param ticker: text to display on status bar as the notification
arrives
:type title: str
:type message: str
:type app_name: str
:type app_icon: str
:type timeout: int
:type ticker: str
.. note::
When called on Windows, ``app_icon`` has to be a path to
a file in .ICO format.
'''
self._notify(title=title, message=message, app_icon=app_icon,
app_name=app_name, timeout=timeout, ticker=ticker)
# private
def _notify(self, **kwargs):
raise NotImplementedError("No usable implementation found!")
|
'''
Notification
===========
The :class:`Notification` provides access to public methods to create
notifications.
Simple Examples
---------------
To send notification::
>>> from plyer import notification
>>> title = 'plyer'
>>> message = 'This is an example.'
>>> notification.notify(title=title, message=message)
'''
class Notification(object):
'''
Notification facade.
'''
def notify(self, title='', message='', app_name='', app_icon='',
timeout=10, ticker=''):
'''
Send a notification.
:param title: Title of the notification
:param message: Message of the notification
:param app_name: Name of the app launching this notification
:param app_icon: Icon to be displayed along with the message
:param timeout: time to display the message for, defaults to 10
:param ticker: text to display on status bar as the notification
arrives
:type title: str
:type message: str
:type app_name: str
:type app_icon: str
:type timeout: int
:type ticker: str
'''
self._notify(title=title, message=message, app_icon=app_icon,
app_name=app_name, timeout=timeout, ticker=ticker)
# private
def _notify(self, **kwargs):
raise NotImplementedError("No usable implementation found!")
Add note about Windows icon format'''
Notification
===========
The :class:`Notification` provides access to public methods to create
notifications.
Simple Examples
---------------
To send notification::
>>> from plyer import notification
>>> title = 'plyer'
>>> message = 'This is an example.'
>>> notification.notify(title=title, message=message)
'''
class Notification(object):
'''
Notification facade.
'''
def notify(self, title='', message='', app_name='', app_icon='',
timeout=10, ticker=''):
'''
Send a notification.
:param title: Title of the notification
:param message: Message of the notification
:param app_name: Name of the app launching this notification
:param app_icon: Icon to be displayed along with the message
:param timeout: time to display the message for, defaults to 10
:param ticker: text to display on status bar as the notification
arrives
:type title: str
:type message: str
:type app_name: str
:type app_icon: str
:type timeout: int
:type ticker: str
.. note::
When called on Windows, ``app_icon`` has to be a path to
a file in .ICO format.
'''
self._notify(title=title, message=message, app_icon=app_icon,
app_name=app_name, timeout=timeout, ticker=ticker)
# private
def _notify(self, **kwargs):
raise NotImplementedError("No usable implementation found!")
|
<commit_before>'''
Notification
===========
The :class:`Notification` provides access to public methods to create
notifications.
Simple Examples
---------------
To send notification::
>>> from plyer import notification
>>> title = 'plyer'
>>> message = 'This is an example.'
>>> notification.notify(title=title, message=message)
'''
class Notification(object):
'''
Notification facade.
'''
def notify(self, title='', message='', app_name='', app_icon='',
timeout=10, ticker=''):
'''
Send a notification.
:param title: Title of the notification
:param message: Message of the notification
:param app_name: Name of the app launching this notification
:param app_icon: Icon to be displayed along with the message
:param timeout: time to display the message for, defaults to 10
:param ticker: text to display on status bar as the notification
arrives
:type title: str
:type message: str
:type app_name: str
:type app_icon: str
:type timeout: int
:type ticker: str
'''
self._notify(title=title, message=message, app_icon=app_icon,
app_name=app_name, timeout=timeout, ticker=ticker)
# private
def _notify(self, **kwargs):
raise NotImplementedError("No usable implementation found!")
<commit_msg>Add note about Windows icon format<commit_after>'''
Notification
===========
The :class:`Notification` provides access to public methods to create
notifications.
Simple Examples
---------------
To send notification::
>>> from plyer import notification
>>> title = 'plyer'
>>> message = 'This is an example.'
>>> notification.notify(title=title, message=message)
'''
class Notification(object):
'''
Notification facade.
'''
def notify(self, title='', message='', app_name='', app_icon='',
timeout=10, ticker=''):
'''
Send a notification.
:param title: Title of the notification
:param message: Message of the notification
:param app_name: Name of the app launching this notification
:param app_icon: Icon to be displayed along with the message
:param timeout: time to display the message for, defaults to 10
:param ticker: text to display on status bar as the notification
arrives
:type title: str
:type message: str
:type app_name: str
:type app_icon: str
:type timeout: int
:type ticker: str
.. note::
When called on Windows, ``app_icon`` has to be a path to
a file in .ICO format.
'''
self._notify(title=title, message=message, app_icon=app_icon,
app_name=app_name, timeout=timeout, ticker=ticker)
# private
def _notify(self, **kwargs):
raise NotImplementedError("No usable implementation found!")
|
d53b05f648053d46c6b4b7353d9acb96d6c18179
|
inventory/models.py
|
inventory/models.py
|
import uuid
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from symposion.conference.models import Conference
from root.models import Base
class Tshirt(Base):
""" Model to store the different types of tshirt. """
gender = models.CharField(_("gender"), max_length=255)
limit = models.PositiveIntegerField(_("limit"), default=0)
price = models.PositiveIntegerField(_("price"), default=0, db_index=True)
conference = models.ForeignKey(Conference, verbose_name=_("conference"))
class Meta:
verbose_name = _("tshirt")
verbose_name_plural = _("tshirts")
def __unicode(self):
return u"%s: %s" % (self.conference.title, self.gender)
class UserTshirt(Base):
""" Model for maitaining the tshirt order entry for all the Users. """
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
size = models.CharField(_("size"), max_length=5)
user = models.ForeignKey(User, on_delete=models.CASCADE)
tshirt = models.ForeignKey(Tshirt, on_delete=models.CASCADE)
class Meta:
verbose_name = _("user tshirt")
verbose_name_plural = _("tshirt")
ordering = ['-timestamp']
def __unicode__(self):
return u'%s:%s:%s' % (self.user.username, self.tshirt.gender, self.size)
|
import uuid
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from symposion.conference.models import Conference
from root.models import Base
class Tshirt(Base):
""" Model to store the different types of tshirt. """
gender = models.CharField(_("gender"), max_length=255)
size = models.CharField(_("size"), max_length=5)
limit = models.PositiveIntegerField(_("limit"), default=0)
price = models.PositiveIntegerField(_("price"), default=0, db_index=True)
conference = models.ForeignKey(Conference, verbose_name=_("conference"))
class Meta:
verbose_name = _("tshirt")
verbose_name_plural = _("tshirts")
def __unicode(self):
return u"%s: %s" % (self.conference.title, self.gender)
class UserTshirt(Base):
""" Model for maitaining the tshirt order entry for all the Users. """
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
user = models.ForeignKey(User, on_delete=models.CASCADE)
tshirt = models.ForeignKey(Tshirt, on_delete=models.CASCADE)
class Meta:
verbose_name = _("user tshirt")
verbose_name_plural = _("tshirt")
ordering = ['-timestamp']
def __unicode__(self):
return u'%s:%s:%s' % (self.user.username, self.tshirt.gender, self.size)
|
Move size from UserTshirt to Tshirt
|
inventory: Move size from UserTshirt to Tshirt
|
Python
|
mit
|
PyConPune/pune.pycon.org,PyConPune/pune.pycon.org,PyConPune/pune.pycon.org
|
import uuid
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from symposion.conference.models import Conference
from root.models import Base
class Tshirt(Base):
""" Model to store the different types of tshirt. """
gender = models.CharField(_("gender"), max_length=255)
limit = models.PositiveIntegerField(_("limit"), default=0)
price = models.PositiveIntegerField(_("price"), default=0, db_index=True)
conference = models.ForeignKey(Conference, verbose_name=_("conference"))
class Meta:
verbose_name = _("tshirt")
verbose_name_plural = _("tshirts")
def __unicode(self):
return u"%s: %s" % (self.conference.title, self.gender)
class UserTshirt(Base):
""" Model for maitaining the tshirt order entry for all the Users. """
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
size = models.CharField(_("size"), max_length=5)
user = models.ForeignKey(User, on_delete=models.CASCADE)
tshirt = models.ForeignKey(Tshirt, on_delete=models.CASCADE)
class Meta:
verbose_name = _("user tshirt")
verbose_name_plural = _("tshirt")
ordering = ['-timestamp']
def __unicode__(self):
return u'%s:%s:%s' % (self.user.username, self.tshirt.gender, self.size)
inventory: Move size from UserTshirt to Tshirt
|
import uuid
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from symposion.conference.models import Conference
from root.models import Base
class Tshirt(Base):
""" Model to store the different types of tshirt. """
gender = models.CharField(_("gender"), max_length=255)
size = models.CharField(_("size"), max_length=5)
limit = models.PositiveIntegerField(_("limit"), default=0)
price = models.PositiveIntegerField(_("price"), default=0, db_index=True)
conference = models.ForeignKey(Conference, verbose_name=_("conference"))
class Meta:
verbose_name = _("tshirt")
verbose_name_plural = _("tshirts")
def __unicode(self):
return u"%s: %s" % (self.conference.title, self.gender)
class UserTshirt(Base):
""" Model for maitaining the tshirt order entry for all the Users. """
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
user = models.ForeignKey(User, on_delete=models.CASCADE)
tshirt = models.ForeignKey(Tshirt, on_delete=models.CASCADE)
class Meta:
verbose_name = _("user tshirt")
verbose_name_plural = _("tshirt")
ordering = ['-timestamp']
def __unicode__(self):
return u'%s:%s:%s' % (self.user.username, self.tshirt.gender, self.size)
|
<commit_before>import uuid
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from symposion.conference.models import Conference
from root.models import Base
class Tshirt(Base):
""" Model to store the different types of tshirt. """
gender = models.CharField(_("gender"), max_length=255)
limit = models.PositiveIntegerField(_("limit"), default=0)
price = models.PositiveIntegerField(_("price"), default=0, db_index=True)
conference = models.ForeignKey(Conference, verbose_name=_("conference"))
class Meta:
verbose_name = _("tshirt")
verbose_name_plural = _("tshirts")
def __unicode(self):
return u"%s: %s" % (self.conference.title, self.gender)
class UserTshirt(Base):
""" Model for maitaining the tshirt order entry for all the Users. """
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
size = models.CharField(_("size"), max_length=5)
user = models.ForeignKey(User, on_delete=models.CASCADE)
tshirt = models.ForeignKey(Tshirt, on_delete=models.CASCADE)
class Meta:
verbose_name = _("user tshirt")
verbose_name_plural = _("tshirt")
ordering = ['-timestamp']
def __unicode__(self):
return u'%s:%s:%s' % (self.user.username, self.tshirt.gender, self.size)
<commit_msg>inventory: Move size from UserTshirt to Tshirt<commit_after>
|
import uuid
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from symposion.conference.models import Conference
from root.models import Base
class Tshirt(Base):
""" Model to store the different types of tshirt. """
gender = models.CharField(_("gender"), max_length=255)
size = models.CharField(_("size"), max_length=5)
limit = models.PositiveIntegerField(_("limit"), default=0)
price = models.PositiveIntegerField(_("price"), default=0, db_index=True)
conference = models.ForeignKey(Conference, verbose_name=_("conference"))
class Meta:
verbose_name = _("tshirt")
verbose_name_plural = _("tshirts")
def __unicode(self):
return u"%s: %s" % (self.conference.title, self.gender)
class UserTshirt(Base):
""" Model for maitaining the tshirt order entry for all the Users. """
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
user = models.ForeignKey(User, on_delete=models.CASCADE)
tshirt = models.ForeignKey(Tshirt, on_delete=models.CASCADE)
class Meta:
verbose_name = _("user tshirt")
verbose_name_plural = _("tshirt")
ordering = ['-timestamp']
def __unicode__(self):
return u'%s:%s:%s' % (self.user.username, self.tshirt.gender, self.size)
|
import uuid
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from symposion.conference.models import Conference
from root.models import Base
class Tshirt(Base):
""" Model to store the different types of tshirt. """
gender = models.CharField(_("gender"), max_length=255)
limit = models.PositiveIntegerField(_("limit"), default=0)
price = models.PositiveIntegerField(_("price"), default=0, db_index=True)
conference = models.ForeignKey(Conference, verbose_name=_("conference"))
class Meta:
verbose_name = _("tshirt")
verbose_name_plural = _("tshirts")
def __unicode(self):
return u"%s: %s" % (self.conference.title, self.gender)
class UserTshirt(Base):
""" Model for maitaining the tshirt order entry for all the Users. """
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
size = models.CharField(_("size"), max_length=5)
user = models.ForeignKey(User, on_delete=models.CASCADE)
tshirt = models.ForeignKey(Tshirt, on_delete=models.CASCADE)
class Meta:
verbose_name = _("user tshirt")
verbose_name_plural = _("tshirt")
ordering = ['-timestamp']
def __unicode__(self):
return u'%s:%s:%s' % (self.user.username, self.tshirt.gender, self.size)
inventory: Move size from UserTshirt to Tshirtimport uuid
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from symposion.conference.models import Conference
from root.models import Base
class Tshirt(Base):
""" Model to store the different types of tshirt. """
gender = models.CharField(_("gender"), max_length=255)
size = models.CharField(_("size"), max_length=5)
limit = models.PositiveIntegerField(_("limit"), default=0)
price = models.PositiveIntegerField(_("price"), default=0, db_index=True)
conference = models.ForeignKey(Conference, verbose_name=_("conference"))
class Meta:
verbose_name = _("tshirt")
verbose_name_plural = _("tshirts")
def __unicode(self):
return u"%s: %s" % (self.conference.title, self.gender)
class UserTshirt(Base):
""" Model for maitaining the tshirt order entry for all the Users. """
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
user = models.ForeignKey(User, on_delete=models.CASCADE)
tshirt = models.ForeignKey(Tshirt, on_delete=models.CASCADE)
class Meta:
verbose_name = _("user tshirt")
verbose_name_plural = _("tshirt")
ordering = ['-timestamp']
def __unicode__(self):
return u'%s:%s:%s' % (self.user.username, self.tshirt.gender, self.size)
|
<commit_before>import uuid
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from symposion.conference.models import Conference
from root.models import Base
class Tshirt(Base):
""" Model to store the different types of tshirt. """
gender = models.CharField(_("gender"), max_length=255)
limit = models.PositiveIntegerField(_("limit"), default=0)
price = models.PositiveIntegerField(_("price"), default=0, db_index=True)
conference = models.ForeignKey(Conference, verbose_name=_("conference"))
class Meta:
verbose_name = _("tshirt")
verbose_name_plural = _("tshirts")
def __unicode(self):
return u"%s: %s" % (self.conference.title, self.gender)
class UserTshirt(Base):
""" Model for maitaining the tshirt order entry for all the Users. """
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
size = models.CharField(_("size"), max_length=5)
user = models.ForeignKey(User, on_delete=models.CASCADE)
tshirt = models.ForeignKey(Tshirt, on_delete=models.CASCADE)
class Meta:
verbose_name = _("user tshirt")
verbose_name_plural = _("tshirt")
ordering = ['-timestamp']
def __unicode__(self):
return u'%s:%s:%s' % (self.user.username, self.tshirt.gender, self.size)
<commit_msg>inventory: Move size from UserTshirt to Tshirt<commit_after>import uuid
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext as _
from symposion.conference.models import Conference
from root.models import Base
class Tshirt(Base):
""" Model to store the different types of tshirt. """
gender = models.CharField(_("gender"), max_length=255)
size = models.CharField(_("size"), max_length=5)
limit = models.PositiveIntegerField(_("limit"), default=0)
price = models.PositiveIntegerField(_("price"), default=0, db_index=True)
conference = models.ForeignKey(Conference, verbose_name=_("conference"))
class Meta:
verbose_name = _("tshirt")
verbose_name_plural = _("tshirts")
def __unicode(self):
return u"%s: %s" % (self.conference.title, self.gender)
class UserTshirt(Base):
""" Model for maitaining the tshirt order entry for all the Users. """
uuid = models.UUIDField(default=uuid.uuid4, editable=False)
user = models.ForeignKey(User, on_delete=models.CASCADE)
tshirt = models.ForeignKey(Tshirt, on_delete=models.CASCADE)
class Meta:
verbose_name = _("user tshirt")
verbose_name_plural = _("tshirt")
ordering = ['-timestamp']
def __unicode__(self):
return u'%s:%s:%s' % (self.user.username, self.tshirt.gender, self.size)
|
3a42b33124d6036dacee85867e484cb25d32a903
|
IPython/terminal/pt_inputhooks/qt.py
|
IPython/terminal/pt_inputhooks/qt.py
|
import sys
from IPython.external.qt_for_kernel import QtCore, QtGui
def inputhook(context):
app = QtCore.QCoreApplication.instance()
if not app:
return
event_loop = QtCore.QEventLoop(app)
if sys.platform == 'win32':
# The QSocketNotifier method doesn't appear to work on Windows.
# Use polling instead.
timer = QtCore.QTimer()
timer.timeout.connect(event_loop.quit)
while not context.input_is_ready():
timer.start(50) # 50 ms
event_loop.exec_()
timer.stop()
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
notifier = QtCore.QSocketNotifier(context.fileno(), QtCore.QSocketNotifier.Read)
notifier.setEnabled(True)
notifier.activated.connect(event_loop.exit)
event_loop.exec_()
|
import sys
from IPython.external.qt_for_kernel import QtCore, QtGui
# If we create a QApplication, keep a reference to it so that it doesn't get
# garbage collected.
_appref = None
def inputhook(context):
global _appref
app = QtCore.QCoreApplication.instance()
if not app:
_appref = app = QtGui.QApplication([" "])
event_loop = QtCore.QEventLoop(app)
if sys.platform == 'win32':
# The QSocketNotifier method doesn't appear to work on Windows.
# Use polling instead.
timer = QtCore.QTimer()
timer.timeout.connect(event_loop.quit)
while not context.input_is_ready():
timer.start(50) # 50 ms
event_loop.exec_()
timer.stop()
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
notifier = QtCore.QSocketNotifier(context.fileno(), QtCore.QSocketNotifier.Read)
notifier.setEnabled(True)
notifier.activated.connect(event_loop.exit)
event_loop.exec_()
|
Create a QApplication for inputhook if one doesn't already exist
|
Create a QApplication for inputhook if one doesn't already exist
Closes gh-9784
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
import sys
from IPython.external.qt_for_kernel import QtCore, QtGui
def inputhook(context):
app = QtCore.QCoreApplication.instance()
if not app:
return
event_loop = QtCore.QEventLoop(app)
if sys.platform == 'win32':
# The QSocketNotifier method doesn't appear to work on Windows.
# Use polling instead.
timer = QtCore.QTimer()
timer.timeout.connect(event_loop.quit)
while not context.input_is_ready():
timer.start(50) # 50 ms
event_loop.exec_()
timer.stop()
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
notifier = QtCore.QSocketNotifier(context.fileno(), QtCore.QSocketNotifier.Read)
notifier.setEnabled(True)
notifier.activated.connect(event_loop.exit)
event_loop.exec_()
Create a QApplication for inputhook if one doesn't already exist
Closes gh-9784
|
import sys
from IPython.external.qt_for_kernel import QtCore, QtGui
# If we create a QApplication, keep a reference to it so that it doesn't get
# garbage collected.
_appref = None
def inputhook(context):
global _appref
app = QtCore.QCoreApplication.instance()
if not app:
_appref = app = QtGui.QApplication([" "])
event_loop = QtCore.QEventLoop(app)
if sys.platform == 'win32':
# The QSocketNotifier method doesn't appear to work on Windows.
# Use polling instead.
timer = QtCore.QTimer()
timer.timeout.connect(event_loop.quit)
while not context.input_is_ready():
timer.start(50) # 50 ms
event_loop.exec_()
timer.stop()
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
notifier = QtCore.QSocketNotifier(context.fileno(), QtCore.QSocketNotifier.Read)
notifier.setEnabled(True)
notifier.activated.connect(event_loop.exit)
event_loop.exec_()
|
<commit_before>import sys
from IPython.external.qt_for_kernel import QtCore, QtGui
def inputhook(context):
app = QtCore.QCoreApplication.instance()
if not app:
return
event_loop = QtCore.QEventLoop(app)
if sys.platform == 'win32':
# The QSocketNotifier method doesn't appear to work on Windows.
# Use polling instead.
timer = QtCore.QTimer()
timer.timeout.connect(event_loop.quit)
while not context.input_is_ready():
timer.start(50) # 50 ms
event_loop.exec_()
timer.stop()
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
notifier = QtCore.QSocketNotifier(context.fileno(), QtCore.QSocketNotifier.Read)
notifier.setEnabled(True)
notifier.activated.connect(event_loop.exit)
event_loop.exec_()
<commit_msg>Create a QApplication for inputhook if one doesn't already exist
Closes gh-9784<commit_after>
|
import sys
from IPython.external.qt_for_kernel import QtCore, QtGui
# If we create a QApplication, keep a reference to it so that it doesn't get
# garbage collected.
_appref = None
def inputhook(context):
global _appref
app = QtCore.QCoreApplication.instance()
if not app:
_appref = app = QtGui.QApplication([" "])
event_loop = QtCore.QEventLoop(app)
if sys.platform == 'win32':
# The QSocketNotifier method doesn't appear to work on Windows.
# Use polling instead.
timer = QtCore.QTimer()
timer.timeout.connect(event_loop.quit)
while not context.input_is_ready():
timer.start(50) # 50 ms
event_loop.exec_()
timer.stop()
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
notifier = QtCore.QSocketNotifier(context.fileno(), QtCore.QSocketNotifier.Read)
notifier.setEnabled(True)
notifier.activated.connect(event_loop.exit)
event_loop.exec_()
|
import sys
from IPython.external.qt_for_kernel import QtCore, QtGui
def inputhook(context):
app = QtCore.QCoreApplication.instance()
if not app:
return
event_loop = QtCore.QEventLoop(app)
if sys.platform == 'win32':
# The QSocketNotifier method doesn't appear to work on Windows.
# Use polling instead.
timer = QtCore.QTimer()
timer.timeout.connect(event_loop.quit)
while not context.input_is_ready():
timer.start(50) # 50 ms
event_loop.exec_()
timer.stop()
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
notifier = QtCore.QSocketNotifier(context.fileno(), QtCore.QSocketNotifier.Read)
notifier.setEnabled(True)
notifier.activated.connect(event_loop.exit)
event_loop.exec_()
Create a QApplication for inputhook if one doesn't already exist
Closes gh-9784import sys
from IPython.external.qt_for_kernel import QtCore, QtGui
# If we create a QApplication, keep a reference to it so that it doesn't get
# garbage collected.
_appref = None
def inputhook(context):
global _appref
app = QtCore.QCoreApplication.instance()
if not app:
_appref = app = QtGui.QApplication([" "])
event_loop = QtCore.QEventLoop(app)
if sys.platform == 'win32':
# The QSocketNotifier method doesn't appear to work on Windows.
# Use polling instead.
timer = QtCore.QTimer()
timer.timeout.connect(event_loop.quit)
while not context.input_is_ready():
timer.start(50) # 50 ms
event_loop.exec_()
timer.stop()
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
notifier = QtCore.QSocketNotifier(context.fileno(), QtCore.QSocketNotifier.Read)
notifier.setEnabled(True)
notifier.activated.connect(event_loop.exit)
event_loop.exec_()
|
<commit_before>import sys
from IPython.external.qt_for_kernel import QtCore, QtGui
def inputhook(context):
app = QtCore.QCoreApplication.instance()
if not app:
return
event_loop = QtCore.QEventLoop(app)
if sys.platform == 'win32':
# The QSocketNotifier method doesn't appear to work on Windows.
# Use polling instead.
timer = QtCore.QTimer()
timer.timeout.connect(event_loop.quit)
while not context.input_is_ready():
timer.start(50) # 50 ms
event_loop.exec_()
timer.stop()
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
notifier = QtCore.QSocketNotifier(context.fileno(), QtCore.QSocketNotifier.Read)
notifier.setEnabled(True)
notifier.activated.connect(event_loop.exit)
event_loop.exec_()
<commit_msg>Create a QApplication for inputhook if one doesn't already exist
Closes gh-9784<commit_after>import sys
from IPython.external.qt_for_kernel import QtCore, QtGui
# If we create a QApplication, keep a reference to it so that it doesn't get
# garbage collected.
_appref = None
def inputhook(context):
global _appref
app = QtCore.QCoreApplication.instance()
if not app:
_appref = app = QtGui.QApplication([" "])
event_loop = QtCore.QEventLoop(app)
if sys.platform == 'win32':
# The QSocketNotifier method doesn't appear to work on Windows.
# Use polling instead.
timer = QtCore.QTimer()
timer.timeout.connect(event_loop.quit)
while not context.input_is_ready():
timer.start(50) # 50 ms
event_loop.exec_()
timer.stop()
else:
# On POSIX platforms, we can use a file descriptor to quit the event
# loop when there is input ready to read.
notifier = QtCore.QSocketNotifier(context.fileno(), QtCore.QSocketNotifier.Read)
notifier.setEnabled(True)
notifier.activated.connect(event_loop.exit)
event_loop.exec_()
|
d95dd9d3acbd56fd91b67cdfcc1fa9d1758770eb
|
lino_noi/lib/tickets/__init__.py
|
lino_noi/lib/tickets/__init__.py
|
# -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
|
# -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
def setup_main_menu(self, site, profile, m):
p = self.get_menu_group()
m = m.add_menu(p.app_label, p.verbose_name)
m.add_action('tickets.MyTicketsToWork')
def get_dashboard_items(self, user):
if user.authenticated:
yield self.site.actors.tickets.MyTicketsToWork
# else:
# yield self.site.actors.tickets. PublicTickets
|
Move assigned menu and dashboard items to noi/tickets
|
Move assigned menu and dashboard items to noi/tickets
|
Python
|
bsd-2-clause
|
khchine5/noi,lsaffre/noi,lsaffre/noi,khchine5/noi,lsaffre/noi,lino-framework/noi,lino-framework/noi
|
# -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
Move assigned menu and dashboard items to noi/tickets
|
# -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
def setup_main_menu(self, site, profile, m):
p = self.get_menu_group()
m = m.add_menu(p.app_label, p.verbose_name)
m.add_action('tickets.MyTicketsToWork')
def get_dashboard_items(self, user):
if user.authenticated:
yield self.site.actors.tickets.MyTicketsToWork
# else:
# yield self.site.actors.tickets. PublicTickets
|
<commit_before># -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
<commit_msg>Move assigned menu and dashboard items to noi/tickets<commit_after>
|
# -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
def setup_main_menu(self, site, profile, m):
p = self.get_menu_group()
m = m.add_menu(p.app_label, p.verbose_name)
m.add_action('tickets.MyTicketsToWork')
def get_dashboard_items(self, user):
if user.authenticated:
yield self.site.actors.tickets.MyTicketsToWork
# else:
# yield self.site.actors.tickets. PublicTickets
|
# -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
Move assigned menu and dashboard items to noi/tickets# -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
def setup_main_menu(self, site, profile, m):
p = self.get_menu_group()
m = m.add_menu(p.app_label, p.verbose_name)
m.add_action('tickets.MyTicketsToWork')
def get_dashboard_items(self, user):
if user.authenticated:
yield self.site.actors.tickets.MyTicketsToWork
# else:
# yield self.site.actors.tickets. PublicTickets
|
<commit_before># -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
<commit_msg>Move assigned menu and dashboard items to noi/tickets<commit_after># -*- coding: UTF-8 -*-
# Copyright 2016 Luc Saffre
# License: BSD (see file COPYING for details)
"""Fixtures specific for the Team variant of Lino Noi.
.. autosummary::
:toctree:
models
"""
from lino_xl.lib.tickets import *
class Plugin(Plugin):
"""Adds the :mod:`lino_xl.lib.votes` plugin.
"""
extends_models = ['Ticket']
needs_plugins = [
'lino_xl.lib.excerpts',
'lino_xl.lib.topics',
'lino.modlib.comments', 'lino.modlib.changes',
# 'lino_xl.lib.votes',
'lino_noi.lib.noi']
def setup_main_menu(self, site, profile, m):
p = self.get_menu_group()
m = m.add_menu(p.app_label, p.verbose_name)
m.add_action('tickets.MyTicketsToWork')
def get_dashboard_items(self, user):
if user.authenticated:
yield self.site.actors.tickets.MyTicketsToWork
# else:
# yield self.site.actors.tickets. PublicTickets
|
2848badf17fd77138ee9e0b3999805e7e60d24c0
|
tests/builtins/test_dict.py
|
tests/builtins/test_dict.py
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class DictTests(TranspileTestCase):
pass
class BuiltinDictFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["dict"]
not_implemented = [
'test_bytearray',
'test_frozenset',
'test_list',
'test_str',
]
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class DictTests(TranspileTestCase):
pass
class BuiltinDictFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["dict"]
not_implemented = [
'test_bytearray',
'test_list',
'test_str',
]
|
Fix “Unexpected success” by removing ‘test_frozenset’ from BuiltinDictFunctionTests.not_implemented
|
Fix “Unexpected success” by removing ‘test_frozenset’ from BuiltinDictFunctionTests.not_implemented
|
Python
|
bsd-3-clause
|
cflee/voc,freakboy3742/voc,freakboy3742/voc,cflee/voc
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class DictTests(TranspileTestCase):
pass
class BuiltinDictFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["dict"]
not_implemented = [
'test_bytearray',
'test_frozenset',
'test_list',
'test_str',
]
Fix “Unexpected success” by removing ‘test_frozenset’ from BuiltinDictFunctionTests.not_implemented
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class DictTests(TranspileTestCase):
pass
class BuiltinDictFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["dict"]
not_implemented = [
'test_bytearray',
'test_list',
'test_str',
]
|
<commit_before>from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class DictTests(TranspileTestCase):
pass
class BuiltinDictFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["dict"]
not_implemented = [
'test_bytearray',
'test_frozenset',
'test_list',
'test_str',
]
<commit_msg>Fix “Unexpected success” by removing ‘test_frozenset’ from BuiltinDictFunctionTests.not_implemented<commit_after>
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class DictTests(TranspileTestCase):
pass
class BuiltinDictFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["dict"]
not_implemented = [
'test_bytearray',
'test_list',
'test_str',
]
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class DictTests(TranspileTestCase):
pass
class BuiltinDictFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["dict"]
not_implemented = [
'test_bytearray',
'test_frozenset',
'test_list',
'test_str',
]
Fix “Unexpected success” by removing ‘test_frozenset’ from BuiltinDictFunctionTests.not_implementedfrom .. utils import TranspileTestCase, BuiltinFunctionTestCase
class DictTests(TranspileTestCase):
pass
class BuiltinDictFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["dict"]
not_implemented = [
'test_bytearray',
'test_list',
'test_str',
]
|
<commit_before>from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class DictTests(TranspileTestCase):
pass
class BuiltinDictFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["dict"]
not_implemented = [
'test_bytearray',
'test_frozenset',
'test_list',
'test_str',
]
<commit_msg>Fix “Unexpected success” by removing ‘test_frozenset’ from BuiltinDictFunctionTests.not_implemented<commit_after>from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class DictTests(TranspileTestCase):
pass
class BuiltinDictFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["dict"]
not_implemented = [
'test_bytearray',
'test_list',
'test_str',
]
|
535dbef3caf4130cc8543be4aa54c8ce820a5b56
|
tests/builtins/test_list.py
|
tests/builtins/test_list.py
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ListTests(TranspileTestCase):
pass
class BuiltinListFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["list"]
not_implemented = [
'test_bool',
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_float',
'test_frozenset',
'test_int',
'test_str',
]
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ListTests(TranspileTestCase):
pass
class BuiltinListFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["list"]
not_implemented = [
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_frozenset',
'test_str',
]
|
Mark some builtin list() tests as passing
|
Mark some builtin list() tests as passing
- This is due to the earlier fixed TypeError message.
|
Python
|
bsd-3-clause
|
cflee/voc,glasnt/voc,pombredanne/voc,gEt-rIgHt-jR/voc,ASP1234/voc,Felix5721/voc,freakboy3742/voc,pombredanne/voc,Felix5721/voc,freakboy3742/voc,gEt-rIgHt-jR/voc,glasnt/voc,cflee/voc,ASP1234/voc
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ListTests(TranspileTestCase):
pass
class BuiltinListFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["list"]
not_implemented = [
'test_bool',
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_float',
'test_frozenset',
'test_int',
'test_str',
]
Mark some builtin list() tests as passing
- This is due to the earlier fixed TypeError message.
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ListTests(TranspileTestCase):
pass
class BuiltinListFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["list"]
not_implemented = [
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_frozenset',
'test_str',
]
|
<commit_before>from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ListTests(TranspileTestCase):
pass
class BuiltinListFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["list"]
not_implemented = [
'test_bool',
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_float',
'test_frozenset',
'test_int',
'test_str',
]
<commit_msg>Mark some builtin list() tests as passing
- This is due to the earlier fixed TypeError message.<commit_after>
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ListTests(TranspileTestCase):
pass
class BuiltinListFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["list"]
not_implemented = [
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_frozenset',
'test_str',
]
|
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ListTests(TranspileTestCase):
pass
class BuiltinListFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["list"]
not_implemented = [
'test_bool',
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_float',
'test_frozenset',
'test_int',
'test_str',
]
Mark some builtin list() tests as passing
- This is due to the earlier fixed TypeError message.from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ListTests(TranspileTestCase):
pass
class BuiltinListFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["list"]
not_implemented = [
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_frozenset',
'test_str',
]
|
<commit_before>from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ListTests(TranspileTestCase):
pass
class BuiltinListFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["list"]
not_implemented = [
'test_bool',
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_float',
'test_frozenset',
'test_int',
'test_str',
]
<commit_msg>Mark some builtin list() tests as passing
- This is due to the earlier fixed TypeError message.<commit_after>from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class ListTests(TranspileTestCase):
pass
class BuiltinListFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["list"]
not_implemented = [
'test_bytearray',
'test_bytes',
'test_class',
'test_complex',
'test_dict',
'test_frozenset',
'test_str',
]
|
2d25e6e70df357d19d9e873d94ac57d25bd7e6aa
|
local.py
|
local.py
|
import gntp
import Growl
class GNTPRegister(gntp.GNTPRegister):
def send(self):
print 'Sending Registration'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = self.notifications,
defaultNotifications = self.defaultNotifications,
)
growl.register()
class GNTPNotice(gntp.GNTPNotice):
def send(self):
print 'Sending Notification'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = [self.headers['Notification-Name']]
)
noticeIcon = None
if self.headers.get('Notification-Icon',False):
resource = self.headers['Notification-Icon'].split('://')
#print resource
resource = self.resources.get(resource[1],False)
#print resource
if resource:
noticeIcon = resource['Data']
growl.notify(
noteType = self.headers['Notification-Name'],
title = self.headers['Notification-Title'],
description=self.headers['Notification-Text'],
icon=noticeIcon
)
|
import gntp
import Growl
class GNTPRegister(gntp.GNTPRegister):
def send(self):
print 'Sending Local Registration'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = self.notifications,
defaultNotifications = self.defaultNotifications,
)
growl.register()
class GNTPNotice(gntp.GNTPNotice):
def send(self):
print 'Sending Local Notification'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = [self.headers['Notification-Name']]
)
noticeIcon = None
if self.headers.get('Notification-Icon',False):
resource = self.headers['Notification-Icon'].split('://')
#print resource
resource = self.resources.get(resource[1],False)
#print resource
if resource:
noticeIcon = resource['Data']
growl.notify(
noteType = self.headers['Notification-Name'],
title = self.headers['Notification-Title'],
description=self.headers['Notification-Text'],
)
|
Disable icon temporarily and adjust the debug print statements
|
Disable icon temporarily and adjust the debug print statements
|
Python
|
mit
|
kfdm/gntp-regrowl
|
import gntp
import Growl
class GNTPRegister(gntp.GNTPRegister):
def send(self):
print 'Sending Registration'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = self.notifications,
defaultNotifications = self.defaultNotifications,
)
growl.register()
class GNTPNotice(gntp.GNTPNotice):
def send(self):
print 'Sending Notification'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = [self.headers['Notification-Name']]
)
noticeIcon = None
if self.headers.get('Notification-Icon',False):
resource = self.headers['Notification-Icon'].split('://')
#print resource
resource = self.resources.get(resource[1],False)
#print resource
if resource:
noticeIcon = resource['Data']
growl.notify(
noteType = self.headers['Notification-Name'],
title = self.headers['Notification-Title'],
description=self.headers['Notification-Text'],
icon=noticeIcon
)Disable icon temporarily and adjust the debug print statements
|
import gntp
import Growl
class GNTPRegister(gntp.GNTPRegister):
def send(self):
print 'Sending Local Registration'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = self.notifications,
defaultNotifications = self.defaultNotifications,
)
growl.register()
class GNTPNotice(gntp.GNTPNotice):
def send(self):
print 'Sending Local Notification'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = [self.headers['Notification-Name']]
)
noticeIcon = None
if self.headers.get('Notification-Icon',False):
resource = self.headers['Notification-Icon'].split('://')
#print resource
resource = self.resources.get(resource[1],False)
#print resource
if resource:
noticeIcon = resource['Data']
growl.notify(
noteType = self.headers['Notification-Name'],
title = self.headers['Notification-Title'],
description=self.headers['Notification-Text'],
)
|
<commit_before>import gntp
import Growl
class GNTPRegister(gntp.GNTPRegister):
def send(self):
print 'Sending Registration'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = self.notifications,
defaultNotifications = self.defaultNotifications,
)
growl.register()
class GNTPNotice(gntp.GNTPNotice):
def send(self):
print 'Sending Notification'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = [self.headers['Notification-Name']]
)
noticeIcon = None
if self.headers.get('Notification-Icon',False):
resource = self.headers['Notification-Icon'].split('://')
#print resource
resource = self.resources.get(resource[1],False)
#print resource
if resource:
noticeIcon = resource['Data']
growl.notify(
noteType = self.headers['Notification-Name'],
title = self.headers['Notification-Title'],
description=self.headers['Notification-Text'],
icon=noticeIcon
)<commit_msg>Disable icon temporarily and adjust the debug print statements<commit_after>
|
import gntp
import Growl
class GNTPRegister(gntp.GNTPRegister):
def send(self):
print 'Sending Local Registration'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = self.notifications,
defaultNotifications = self.defaultNotifications,
)
growl.register()
class GNTPNotice(gntp.GNTPNotice):
def send(self):
print 'Sending Local Notification'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = [self.headers['Notification-Name']]
)
noticeIcon = None
if self.headers.get('Notification-Icon',False):
resource = self.headers['Notification-Icon'].split('://')
#print resource
resource = self.resources.get(resource[1],False)
#print resource
if resource:
noticeIcon = resource['Data']
growl.notify(
noteType = self.headers['Notification-Name'],
title = self.headers['Notification-Title'],
description=self.headers['Notification-Text'],
)
|
import gntp
import Growl
class GNTPRegister(gntp.GNTPRegister):
def send(self):
print 'Sending Registration'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = self.notifications,
defaultNotifications = self.defaultNotifications,
)
growl.register()
class GNTPNotice(gntp.GNTPNotice):
def send(self):
print 'Sending Notification'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = [self.headers['Notification-Name']]
)
noticeIcon = None
if self.headers.get('Notification-Icon',False):
resource = self.headers['Notification-Icon'].split('://')
#print resource
resource = self.resources.get(resource[1],False)
#print resource
if resource:
noticeIcon = resource['Data']
growl.notify(
noteType = self.headers['Notification-Name'],
title = self.headers['Notification-Title'],
description=self.headers['Notification-Text'],
icon=noticeIcon
)Disable icon temporarily and adjust the debug print statementsimport gntp
import Growl
class GNTPRegister(gntp.GNTPRegister):
def send(self):
print 'Sending Local Registration'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = self.notifications,
defaultNotifications = self.defaultNotifications,
)
growl.register()
class GNTPNotice(gntp.GNTPNotice):
def send(self):
print 'Sending Local Notification'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = [self.headers['Notification-Name']]
)
noticeIcon = None
if self.headers.get('Notification-Icon',False):
resource = self.headers['Notification-Icon'].split('://')
#print resource
resource = self.resources.get(resource[1],False)
#print resource
if resource:
noticeIcon = resource['Data']
growl.notify(
noteType = self.headers['Notification-Name'],
title = self.headers['Notification-Title'],
description=self.headers['Notification-Text'],
)
|
<commit_before>import gntp
import Growl
class GNTPRegister(gntp.GNTPRegister):
def send(self):
print 'Sending Registration'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = self.notifications,
defaultNotifications = self.defaultNotifications,
)
growl.register()
class GNTPNotice(gntp.GNTPNotice):
def send(self):
print 'Sending Notification'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = [self.headers['Notification-Name']]
)
noticeIcon = None
if self.headers.get('Notification-Icon',False):
resource = self.headers['Notification-Icon'].split('://')
#print resource
resource = self.resources.get(resource[1],False)
#print resource
if resource:
noticeIcon = resource['Data']
growl.notify(
noteType = self.headers['Notification-Name'],
title = self.headers['Notification-Title'],
description=self.headers['Notification-Text'],
icon=noticeIcon
)<commit_msg>Disable icon temporarily and adjust the debug print statements<commit_after>import gntp
import Growl
class GNTPRegister(gntp.GNTPRegister):
def send(self):
print 'Sending Local Registration'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = self.notifications,
defaultNotifications = self.defaultNotifications,
)
growl.register()
class GNTPNotice(gntp.GNTPNotice):
def send(self):
print 'Sending Local Notification'
growl = Growl.GrowlNotifier(
applicationName = self.headers['Application-Name'],
notifications = [self.headers['Notification-Name']]
)
noticeIcon = None
if self.headers.get('Notification-Icon',False):
resource = self.headers['Notification-Icon'].split('://')
#print resource
resource = self.resources.get(resource[1],False)
#print resource
if resource:
noticeIcon = resource['Data']
growl.notify(
noteType = self.headers['Notification-Name'],
title = self.headers['Notification-Title'],
description=self.headers['Notification-Text'],
)
|
12e6ba386a733bd38105aacfa2d0304ac94ade67
|
tests/qtgui/qwidget_test.py
|
tests/qtgui/qwidget_test.py
|
import unittest
from PySide.QtGui import QWidget
from helper import UsesQApplication
class QWidgetVisible(UsesQApplication):
def testBasic(self):
# Also related to bug #244, on existence of setVisible'''
widget = QWidget()
self.assert_(not widget.isVisible())
widget.setVisible(True)
self.assert_(widget.isVisible())
if __name__ == '__main__':
unittest.main()
|
import unittest
from PySide.QtGui import QWidget, QMainWindow
from helper import UsesQApplication
class QWidgetInherit(QMainWindow):
def __init__(self):
QWidget.__init__(self)
class QWidgetTest(UsesQApplication):
def testInheritance(self):
newobj = QWidgetInherit()
widget = QWidget()
newobj.setCentralWidget(widget)
self.assertEqual(widget, newobj.centralWidget())
class QWidgetVisible(UsesQApplication):
def testBasic(self):
# Also related to bug #244, on existence of setVisible'''
widget = QWidget()
self.assert_(not widget.isVisible())
widget.setVisible(True)
self.assert_(widget.isVisible())
if __name__ == '__main__':
unittest.main()
|
Test a specific situation that causes python segfault.
|
Test a specific situation that causes python segfault.
Reviewer: Marcelo Lira <6be3b93b2f09145ada72571578cc4097e4ba9a9e@openbossa.org>
Renato Araújo <renato.filho@openbossa.org>
|
Python
|
lgpl-2.1
|
M4rtinK/pyside-bb10,enthought/pyside,enthought/pyside,RobinD42/pyside,qtproject/pyside-pyside,enthought/pyside,M4rtinK/pyside-android,IronManMark20/pyside2,IronManMark20/pyside2,gbaty/pyside2,enthought/pyside,pankajp/pyside,IronManMark20/pyside2,RobinD42/pyside,PySide/PySide,gbaty/pyside2,BadSingleton/pyside2,M4rtinK/pyside-bb10,IronManMark20/pyside2,enthought/pyside,M4rtinK/pyside-android,pankajp/pyside,M4rtinK/pyside-android,qtproject/pyside-pyside,IronManMark20/pyside2,RobinD42/pyside,gbaty/pyside2,qtproject/pyside-pyside,M4rtinK/pyside-android,RobinD42/pyside,BadSingleton/pyside2,gbaty/pyside2,qtproject/pyside-pyside,M4rtinK/pyside-android,RobinD42/pyside,PySide/PySide,M4rtinK/pyside-bb10,M4rtinK/pyside-bb10,enthought/pyside,M4rtinK/pyside-android,PySide/PySide,BadSingleton/pyside2,BadSingleton/pyside2,qtproject/pyside-pyside,PySide/PySide,enthought/pyside,M4rtinK/pyside-bb10,RobinD42/pyside,RobinD42/pyside,pankajp/pyside,PySide/PySide,gbaty/pyside2,pankajp/pyside,pankajp/pyside,M4rtinK/pyside-bb10,BadSingleton/pyside2
|
import unittest
from PySide.QtGui import QWidget
from helper import UsesQApplication
class QWidgetVisible(UsesQApplication):
def testBasic(self):
# Also related to bug #244, on existence of setVisible'''
widget = QWidget()
self.assert_(not widget.isVisible())
widget.setVisible(True)
self.assert_(widget.isVisible())
if __name__ == '__main__':
unittest.main()
Test a specific situation that causes python segfault.
Reviewer: Marcelo Lira <6be3b93b2f09145ada72571578cc4097e4ba9a9e@openbossa.org>
Renato Araújo <renato.filho@openbossa.org>
|
import unittest
from PySide.QtGui import QWidget, QMainWindow
from helper import UsesQApplication
class QWidgetInherit(QMainWindow):
def __init__(self):
QWidget.__init__(self)
class QWidgetTest(UsesQApplication):
def testInheritance(self):
newobj = QWidgetInherit()
widget = QWidget()
newobj.setCentralWidget(widget)
self.assertEqual(widget, newobj.centralWidget())
class QWidgetVisible(UsesQApplication):
def testBasic(self):
# Also related to bug #244, on existence of setVisible'''
widget = QWidget()
self.assert_(not widget.isVisible())
widget.setVisible(True)
self.assert_(widget.isVisible())
if __name__ == '__main__':
unittest.main()
|
<commit_before>
import unittest
from PySide.QtGui import QWidget
from helper import UsesQApplication
class QWidgetVisible(UsesQApplication):
def testBasic(self):
# Also related to bug #244, on existence of setVisible'''
widget = QWidget()
self.assert_(not widget.isVisible())
widget.setVisible(True)
self.assert_(widget.isVisible())
if __name__ == '__main__':
unittest.main()
<commit_msg>Test a specific situation that causes python segfault.
Reviewer: Marcelo Lira <6be3b93b2f09145ada72571578cc4097e4ba9a9e@openbossa.org>
Renato Araújo <renato.filho@openbossa.org><commit_after>
|
import unittest
from PySide.QtGui import QWidget, QMainWindow
from helper import UsesQApplication
class QWidgetInherit(QMainWindow):
def __init__(self):
QWidget.__init__(self)
class QWidgetTest(UsesQApplication):
def testInheritance(self):
newobj = QWidgetInherit()
widget = QWidget()
newobj.setCentralWidget(widget)
self.assertEqual(widget, newobj.centralWidget())
class QWidgetVisible(UsesQApplication):
def testBasic(self):
# Also related to bug #244, on existence of setVisible'''
widget = QWidget()
self.assert_(not widget.isVisible())
widget.setVisible(True)
self.assert_(widget.isVisible())
if __name__ == '__main__':
unittest.main()
|
import unittest
from PySide.QtGui import QWidget
from helper import UsesQApplication
class QWidgetVisible(UsesQApplication):
def testBasic(self):
# Also related to bug #244, on existence of setVisible'''
widget = QWidget()
self.assert_(not widget.isVisible())
widget.setVisible(True)
self.assert_(widget.isVisible())
if __name__ == '__main__':
unittest.main()
Test a specific situation that causes python segfault.
Reviewer: Marcelo Lira <6be3b93b2f09145ada72571578cc4097e4ba9a9e@openbossa.org>
Renato Araújo <renato.filho@openbossa.org>
import unittest
from PySide.QtGui import QWidget, QMainWindow
from helper import UsesQApplication
class QWidgetInherit(QMainWindow):
def __init__(self):
QWidget.__init__(self)
class QWidgetTest(UsesQApplication):
def testInheritance(self):
newobj = QWidgetInherit()
widget = QWidget()
newobj.setCentralWidget(widget)
self.assertEqual(widget, newobj.centralWidget())
class QWidgetVisible(UsesQApplication):
def testBasic(self):
# Also related to bug #244, on existence of setVisible'''
widget = QWidget()
self.assert_(not widget.isVisible())
widget.setVisible(True)
self.assert_(widget.isVisible())
if __name__ == '__main__':
unittest.main()
|
<commit_before>
import unittest
from PySide.QtGui import QWidget
from helper import UsesQApplication
class QWidgetVisible(UsesQApplication):
def testBasic(self):
# Also related to bug #244, on existence of setVisible'''
widget = QWidget()
self.assert_(not widget.isVisible())
widget.setVisible(True)
self.assert_(widget.isVisible())
if __name__ == '__main__':
unittest.main()
<commit_msg>Test a specific situation that causes python segfault.
Reviewer: Marcelo Lira <6be3b93b2f09145ada72571578cc4097e4ba9a9e@openbossa.org>
Renato Araújo <renato.filho@openbossa.org><commit_after>
import unittest
from PySide.QtGui import QWidget, QMainWindow
from helper import UsesQApplication
class QWidgetInherit(QMainWindow):
def __init__(self):
QWidget.__init__(self)
class QWidgetTest(UsesQApplication):
def testInheritance(self):
newobj = QWidgetInherit()
widget = QWidget()
newobj.setCentralWidget(widget)
self.assertEqual(widget, newobj.centralWidget())
class QWidgetVisible(UsesQApplication):
def testBasic(self):
# Also related to bug #244, on existence of setVisible'''
widget = QWidget()
self.assert_(not widget.isVisible())
widget.setVisible(True)
self.assert_(widget.isVisible())
if __name__ == '__main__':
unittest.main()
|
a780d95b76404e46f5060d830fb95b0179716569
|
ports/esp32/modules/inisetup.py
|
ports/esp32/modules/inisetup.py
|
import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
FAT filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsFat.mkfs(bdev)
vfs = uos.VfsFat(bdev)
uos.mount(vfs, "/")
with open("boot.py", "w") as f:
f.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
#import esp
#esp.osdebug(None)
#import webrepl
#webrepl.start()
"""
)
return vfs
|
import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
FAT filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsLfs2.mkfs(bdev)
vfs = uos.VfsLfs2(bdev)
uos.mount(vfs, "/")
with open("boot.py", "w") as f:
f.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
#import esp
#esp.osdebug(None)
#import webrepl
#webrepl.start()
"""
)
return vfs
|
Change from FAT to littlefs v2 as default filesystem.
|
esp32: Change from FAT to littlefs v2 as default filesystem.
This commit changes the default filesystem type for esp32 to littlefs v2.
This port already enables both VfsFat and VfsLfs2, so either can be used
for the filesystem, and existing systems that use FAT will still work.
|
Python
|
mit
|
pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython
|
import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
FAT filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsFat.mkfs(bdev)
vfs = uos.VfsFat(bdev)
uos.mount(vfs, "/")
with open("boot.py", "w") as f:
f.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
#import esp
#esp.osdebug(None)
#import webrepl
#webrepl.start()
"""
)
return vfs
esp32: Change from FAT to littlefs v2 as default filesystem.
This commit changes the default filesystem type for esp32 to littlefs v2.
This port already enables both VfsFat and VfsLfs2, so either can be used
for the filesystem, and existing systems that use FAT will still work.
|
import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
FAT filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsLfs2.mkfs(bdev)
vfs = uos.VfsLfs2(bdev)
uos.mount(vfs, "/")
with open("boot.py", "w") as f:
f.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
#import esp
#esp.osdebug(None)
#import webrepl
#webrepl.start()
"""
)
return vfs
|
<commit_before>import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
FAT filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsFat.mkfs(bdev)
vfs = uos.VfsFat(bdev)
uos.mount(vfs, "/")
with open("boot.py", "w") as f:
f.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
#import esp
#esp.osdebug(None)
#import webrepl
#webrepl.start()
"""
)
return vfs
<commit_msg>esp32: Change from FAT to littlefs v2 as default filesystem.
This commit changes the default filesystem type for esp32 to littlefs v2.
This port already enables both VfsFat and VfsLfs2, so either can be used
for the filesystem, and existing systems that use FAT will still work.<commit_after>
|
import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
FAT filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsLfs2.mkfs(bdev)
vfs = uos.VfsLfs2(bdev)
uos.mount(vfs, "/")
with open("boot.py", "w") as f:
f.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
#import esp
#esp.osdebug(None)
#import webrepl
#webrepl.start()
"""
)
return vfs
|
import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
FAT filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsFat.mkfs(bdev)
vfs = uos.VfsFat(bdev)
uos.mount(vfs, "/")
with open("boot.py", "w") as f:
f.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
#import esp
#esp.osdebug(None)
#import webrepl
#webrepl.start()
"""
)
return vfs
esp32: Change from FAT to littlefs v2 as default filesystem.
This commit changes the default filesystem type for esp32 to littlefs v2.
This port already enables both VfsFat and VfsLfs2, so either can be used
for the filesystem, and existing systems that use FAT will still work.import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
FAT filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsLfs2.mkfs(bdev)
vfs = uos.VfsLfs2(bdev)
uos.mount(vfs, "/")
with open("boot.py", "w") as f:
f.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
#import esp
#esp.osdebug(None)
#import webrepl
#webrepl.start()
"""
)
return vfs
|
<commit_before>import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
FAT filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsFat.mkfs(bdev)
vfs = uos.VfsFat(bdev)
uos.mount(vfs, "/")
with open("boot.py", "w") as f:
f.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
#import esp
#esp.osdebug(None)
#import webrepl
#webrepl.start()
"""
)
return vfs
<commit_msg>esp32: Change from FAT to littlefs v2 as default filesystem.
This commit changes the default filesystem type for esp32 to littlefs v2.
This port already enables both VfsFat and VfsLfs2, so either can be used
for the filesystem, and existing systems that use FAT will still work.<commit_after>import uos
from flashbdev import bdev
def check_bootsec():
buf = bytearray(bdev.ioctl(5, 0)) # 5 is SEC_SIZE
bdev.readblocks(0, buf)
empty = True
for b in buf:
if b != 0xFF:
empty = False
break
if empty:
return True
fs_corrupted()
def fs_corrupted():
import time
while 1:
print(
"""\
FAT filesystem appears to be corrupted. If you had important data there, you
may want to make a flash snapshot to try to recover it. Otherwise, perform
factory reprogramming of MicroPython firmware (completely erase flash, followed
by firmware programming).
"""
)
time.sleep(3)
def setup():
check_bootsec()
print("Performing initial setup")
uos.VfsLfs2.mkfs(bdev)
vfs = uos.VfsLfs2(bdev)
uos.mount(vfs, "/")
with open("boot.py", "w") as f:
f.write(
"""\
# This file is executed on every boot (including wake-boot from deepsleep)
#import esp
#esp.osdebug(None)
#import webrepl
#webrepl.start()
"""
)
return vfs
|
2ea8fa8d0f00e8b9b52e6af9fdfeb1db7dcd0787
|
coveragespace/__init__.py
|
coveragespace/__init__.py
|
"""Package for coverage.space-cli."""
import sys
__project__ = 'coverage.space'
__version__ = '0.6.1'
API = 'http://api.coverage.space'
VERSION = "{0} v{1}".format(__project__, __version__)
|
"""Package for coverage.space-cli."""
import sys
__project__ = 'coverage.space'
__version__ = '0.6.1'
API = 'https://api.coverage.space'
VERSION = "{0} v{1}".format(__project__, __version__)
|
Use SSL for API calls
|
Use SSL for API calls
|
Python
|
mit
|
jacebrowning/coverage-space-cli
|
"""Package for coverage.space-cli."""
import sys
__project__ = 'coverage.space'
__version__ = '0.6.1'
API = 'http://api.coverage.space'
VERSION = "{0} v{1}".format(__project__, __version__)
Use SSL for API calls
|
"""Package for coverage.space-cli."""
import sys
__project__ = 'coverage.space'
__version__ = '0.6.1'
API = 'https://api.coverage.space'
VERSION = "{0} v{1}".format(__project__, __version__)
|
<commit_before>"""Package for coverage.space-cli."""
import sys
__project__ = 'coverage.space'
__version__ = '0.6.1'
API = 'http://api.coverage.space'
VERSION = "{0} v{1}".format(__project__, __version__)
<commit_msg>Use SSL for API calls<commit_after>
|
"""Package for coverage.space-cli."""
import sys
__project__ = 'coverage.space'
__version__ = '0.6.1'
API = 'https://api.coverage.space'
VERSION = "{0} v{1}".format(__project__, __version__)
|
"""Package for coverage.space-cli."""
import sys
__project__ = 'coverage.space'
__version__ = '0.6.1'
API = 'http://api.coverage.space'
VERSION = "{0} v{1}".format(__project__, __version__)
Use SSL for API calls"""Package for coverage.space-cli."""
import sys
__project__ = 'coverage.space'
__version__ = '0.6.1'
API = 'https://api.coverage.space'
VERSION = "{0} v{1}".format(__project__, __version__)
|
<commit_before>"""Package for coverage.space-cli."""
import sys
__project__ = 'coverage.space'
__version__ = '0.6.1'
API = 'http://api.coverage.space'
VERSION = "{0} v{1}".format(__project__, __version__)
<commit_msg>Use SSL for API calls<commit_after>"""Package for coverage.space-cli."""
import sys
__project__ = 'coverage.space'
__version__ = '0.6.1'
API = 'https://api.coverage.space'
VERSION = "{0} v{1}".format(__project__, __version__)
|
a0342631d6888f4748af9011839020ee0843a721
|
crypto_enigma/_version.py
|
crypto_enigma/_version.py
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b3' # aN | bN | cN |
__suffix__ = '.dev1' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b3' # aN | bN | cN |
__suffix__ = '.dev2' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
|
Update test version after test release
|
Update test version after test release
|
Python
|
bsd-3-clause
|
orome/crypto-enigma-py
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b3' # aN | bN | cN |
__suffix__ = '.dev1' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
Update test version after test release
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b3' # aN | bN | cN |
__suffix__ = '.dev2' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
|
<commit_before>#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b3' # aN | bN | cN |
__suffix__ = '.dev1' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
<commit_msg>Update test version after test release<commit_after>
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b3' # aN | bN | cN |
__suffix__ = '.dev2' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
|
#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b3' # aN | bN | cN |
__suffix__ = '.dev1' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
Update test version after test release#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b3' # aN | bN | cN |
__suffix__ = '.dev2' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
|
<commit_before>#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b3' # aN | bN | cN |
__suffix__ = '.dev1' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
<commit_msg>Update test version after test release<commit_after>#!/usr/bin/env python
# encoding: utf8
from __future__ import (absolute_import, print_function, division, unicode_literals)
# See - http://www.python.org/dev/peps/pep-0440/
# See - http://semver.org
__author__ = 'Roy Levien'
__copyright__ = '(c) 2014-2015 Roy Levien'
__release__ = '0.2.1' # N(.N)*
__pre_release__ = 'b3' # aN | bN | cN |
__suffix__ = '.dev2' # .devN | | .postN
__version__ = __release__ + __pre_release__ + __suffix__
|
639eb2da0e239d362b3416e9137b9dcee8da1c87
|
setup.py
|
setup.py
|
# -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except ImportError:
return None
setup(
name="django-basis",
version='0.3.2',
url='http://github.com/frecar/django-basis',
author='Fredrik Carlsen',
author_email='fredrik@carlsen.io',
description='Simple reusable django app for basic model functionality',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
# -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst', format='markdown')
except Exception:
return None
setup(
name="django-basis",
version='0.3.2',
url='http://github.com/frecar/django-basis',
author='Fredrik Carlsen',
author_email='fredrik@carlsen.io',
description='Simple reusable django app for basic model functionality',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
Fix long-description reader, make it fail silently
|
Fix long-description reader, make it fail silently
|
Python
|
mit
|
frecar/django-basis
|
# -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except ImportError:
return None
setup(
name="django-basis",
version='0.3.2',
url='http://github.com/frecar/django-basis',
author='Fredrik Carlsen',
author_email='fredrik@carlsen.io',
description='Simple reusable django app for basic model functionality',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
Fix long-description reader, make it fail silently
|
# -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst', format='markdown')
except Exception:
return None
setup(
name="django-basis",
version='0.3.2',
url='http://github.com/frecar/django-basis',
author='Fredrik Carlsen',
author_email='fredrik@carlsen.io',
description='Simple reusable django app for basic model functionality',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
<commit_before># -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except ImportError:
return None
setup(
name="django-basis",
version='0.3.2',
url='http://github.com/frecar/django-basis',
author='Fredrik Carlsen',
author_email='fredrik@carlsen.io',
description='Simple reusable django app for basic model functionality',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
<commit_msg>Fix long-description reader, make it fail silently<commit_after>
|
# -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst', format='markdown')
except Exception:
return None
setup(
name="django-basis",
version='0.3.2',
url='http://github.com/frecar/django-basis',
author='Fredrik Carlsen',
author_email='fredrik@carlsen.io',
description='Simple reusable django app for basic model functionality',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
# -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except ImportError:
return None
setup(
name="django-basis",
version='0.3.2',
url='http://github.com/frecar/django-basis',
author='Fredrik Carlsen',
author_email='fredrik@carlsen.io',
description='Simple reusable django app for basic model functionality',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
Fix long-description reader, make it fail silently# -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst', format='markdown')
except Exception:
return None
setup(
name="django-basis",
version='0.3.2',
url='http://github.com/frecar/django-basis',
author='Fredrik Carlsen',
author_email='fredrik@carlsen.io',
description='Simple reusable django app for basic model functionality',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
<commit_before># -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except ImportError:
return None
setup(
name="django-basis",
version='0.3.2',
url='http://github.com/frecar/django-basis',
author='Fredrik Carlsen',
author_email='fredrik@carlsen.io',
description='Simple reusable django app for basic model functionality',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
<commit_msg>Fix long-description reader, make it fail silently<commit_after># -*- coding: utf8 -*-
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
def _read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst', format='markdown')
except Exception:
return None
setup(
name="django-basis",
version='0.3.2',
url='http://github.com/frecar/django-basis',
author='Fredrik Carlsen',
author_email='fredrik@carlsen.io',
description='Simple reusable django app for basic model functionality',
long_description=_read_long_description(),
packages=find_packages(exclude='tests'),
tests_require=[
'django>=1.4',
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
381a266830bbc92bdf6cacb9e4a1ff7044c07c19
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='rest-server',
version='0.1.0',
url="http://github.io/boundary/rest-server",
author='David Gwartney',
author_email='david_gwartney@bmc.com',
packages=['restserver', ],
entry_points={
'console_scripts': [
'rest-server = restserver.app:main',
]
},
package_data={'restserver': ['*.json']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin REST Target',
long_description=open('README.txt').read(),
install_requires=[
"Flask >= 0.10.1"
],
)
|
from distutils.core import setup
setup(
name='rest-server',
version='0.2.2',
url="http://github.io/boundary/rest-server",
author='David Gwartney',
author_email='david_gwartney@bmc.com',
packages=['restserver', ],
entry_points={
'console_scripts': [
'rest-server = restserver.app:main',
]
},
package_data={'restserver': ['*.json', 'rest.*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin REST Target',
long_description=open('README.txt').read(),
install_requires=[
"Flask >= 0.10.1"
],
)
|
Increment version ; certs to package
|
Increment version ; certs to package
|
Python
|
apache-2.0
|
boundary/rest-server
|
from distutils.core import setup
setup(
name='rest-server',
version='0.1.0',
url="http://github.io/boundary/rest-server",
author='David Gwartney',
author_email='david_gwartney@bmc.com',
packages=['restserver', ],
entry_points={
'console_scripts': [
'rest-server = restserver.app:main',
]
},
package_data={'restserver': ['*.json']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin REST Target',
long_description=open('README.txt').read(),
install_requires=[
"Flask >= 0.10.1"
],
)
Increment version ; certs to package
|
from distutils.core import setup
setup(
name='rest-server',
version='0.2.2',
url="http://github.io/boundary/rest-server",
author='David Gwartney',
author_email='david_gwartney@bmc.com',
packages=['restserver', ],
entry_points={
'console_scripts': [
'rest-server = restserver.app:main',
]
},
package_data={'restserver': ['*.json', 'rest.*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin REST Target',
long_description=open('README.txt').read(),
install_requires=[
"Flask >= 0.10.1"
],
)
|
<commit_before>from distutils.core import setup
setup(
name='rest-server',
version='0.1.0',
url="http://github.io/boundary/rest-server",
author='David Gwartney',
author_email='david_gwartney@bmc.com',
packages=['restserver', ],
entry_points={
'console_scripts': [
'rest-server = restserver.app:main',
]
},
package_data={'restserver': ['*.json']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin REST Target',
long_description=open('README.txt').read(),
install_requires=[
"Flask >= 0.10.1"
],
)
<commit_msg>Increment version ; certs to package<commit_after>
|
from distutils.core import setup
setup(
name='rest-server',
version='0.2.2',
url="http://github.io/boundary/rest-server",
author='David Gwartney',
author_email='david_gwartney@bmc.com',
packages=['restserver', ],
entry_points={
'console_scripts': [
'rest-server = restserver.app:main',
]
},
package_data={'restserver': ['*.json', 'rest.*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin REST Target',
long_description=open('README.txt').read(),
install_requires=[
"Flask >= 0.10.1"
],
)
|
from distutils.core import setup
setup(
name='rest-server',
version='0.1.0',
url="http://github.io/boundary/rest-server",
author='David Gwartney',
author_email='david_gwartney@bmc.com',
packages=['restserver', ],
entry_points={
'console_scripts': [
'rest-server = restserver.app:main',
]
},
package_data={'restserver': ['*.json']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin REST Target',
long_description=open('README.txt').read(),
install_requires=[
"Flask >= 0.10.1"
],
)
Increment version ; certs to packagefrom distutils.core import setup
setup(
name='rest-server',
version='0.2.2',
url="http://github.io/boundary/rest-server",
author='David Gwartney',
author_email='david_gwartney@bmc.com',
packages=['restserver', ],
entry_points={
'console_scripts': [
'rest-server = restserver.app:main',
]
},
package_data={'restserver': ['*.json', 'rest.*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin REST Target',
long_description=open('README.txt').read(),
install_requires=[
"Flask >= 0.10.1"
],
)
|
<commit_before>from distutils.core import setup
setup(
name='rest-server',
version='0.1.0',
url="http://github.io/boundary/rest-server",
author='David Gwartney',
author_email='david_gwartney@bmc.com',
packages=['restserver', ],
entry_points={
'console_scripts': [
'rest-server = restserver.app:main',
]
},
package_data={'restserver': ['*.json']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin REST Target',
long_description=open('README.txt').read(),
install_requires=[
"Flask >= 0.10.1"
],
)
<commit_msg>Increment version ; certs to package<commit_after>from distutils.core import setup
setup(
name='rest-server',
version='0.2.2',
url="http://github.io/boundary/rest-server",
author='David Gwartney',
author_email='david_gwartney@bmc.com',
packages=['restserver', ],
entry_points={
'console_scripts': [
'rest-server = restserver.app:main',
]
},
package_data={'restserver': ['*.json', 'rest.*']},
license='LICENSE',
description='TrueSight Pulse Meter Plugin REST Target',
long_description=open('README.txt').read(),
install_requires=[
"Flask >= 0.10.1"
],
)
|
ce8f864d3254acc19595e35dcb0b1e75efeb6b34
|
setup.py
|
setup.py
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.csrf',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
Make sure ext.csrf is installed with WTForms
|
Make sure ext.csrf is installed with WTForms
|
Python
|
bsd-3-clause
|
Khan/wtforms
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
Make sure ext.csrf is installed with WTForms
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.csrf',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
<commit_before>import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
<commit_msg>Make sure ext.csrf is installed with WTForms<commit_after>
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.csrf',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
Make sure ext.csrf is installed with WTForms
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.csrf',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
<commit_before>import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
<commit_msg>Make sure ext.csrf is installed with WTForms<commit_after>import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.csrf',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
cc997a7fd67306891fa5a0a73700712505286be1
|
setup.py
|
setup.py
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='kai',
version='0.1',
description='',
author='Ben Bangert',
author_email='ben@groovie.org',
install_requires=[
"Pylons>=0.9.7rc4", "CouchDB>=0.4", "python-openid>=2.2.1",
"pytz>=2008i", "Babel>=0.9.4", "tw.forms==0.9.3", "docutils>=0.5",
"PyXML>=0.8.4", "cssutils>=0.9.6a0", "Pygments>=1.0",
],
setup_requires=["PasteScript>=1.6.3"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'kai': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'kai': [
('**.py', 'python', None),
('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('public/**', 'ignore', None)]},
zip_safe=False,
paster_plugins=['PasteScript', 'Pylons'],
entry_points="""
[paste.app_factory]
main = kai.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
""",
)
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='kai',
version='0.1',
description='',
author='Ben Bangert',
author_email='ben@groovie.org',
install_requires=[
"Pylons>=0.9.7", "CouchDB>=0.4", "python-openid>=2.2.1",
"pytz>=2008i", "Babel>=0.9.4", "tw.forms==0.9.3", "docutils>=0.5",
"PyXML>=0.8.4", "cssutils>=0.9.6a0", "Pygments>=1.0",
],
setup_requires=["PasteScript>=1.6.3"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'kai': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'kai': [
('**.py', 'python', None),
('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('public/**', 'ignore', None)]},
zip_safe=False,
paster_plugins=['PasteScript', 'Pylons'],
entry_points="""
[paste.app_factory]
main = kai.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
""",
)
|
Update to latest Pylons ver
|
Update to latest Pylons ver
|
Python
|
bsd-3-clause
|
Pylons/kai,Pylons/kai
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='kai',
version='0.1',
description='',
author='Ben Bangert',
author_email='ben@groovie.org',
install_requires=[
"Pylons>=0.9.7rc4", "CouchDB>=0.4", "python-openid>=2.2.1",
"pytz>=2008i", "Babel>=0.9.4", "tw.forms==0.9.3", "docutils>=0.5",
"PyXML>=0.8.4", "cssutils>=0.9.6a0", "Pygments>=1.0",
],
setup_requires=["PasteScript>=1.6.3"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'kai': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'kai': [
('**.py', 'python', None),
('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('public/**', 'ignore', None)]},
zip_safe=False,
paster_plugins=['PasteScript', 'Pylons'],
entry_points="""
[paste.app_factory]
main = kai.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
""",
)
Update to latest Pylons ver
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='kai',
version='0.1',
description='',
author='Ben Bangert',
author_email='ben@groovie.org',
install_requires=[
"Pylons>=0.9.7", "CouchDB>=0.4", "python-openid>=2.2.1",
"pytz>=2008i", "Babel>=0.9.4", "tw.forms==0.9.3", "docutils>=0.5",
"PyXML>=0.8.4", "cssutils>=0.9.6a0", "Pygments>=1.0",
],
setup_requires=["PasteScript>=1.6.3"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'kai': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'kai': [
('**.py', 'python', None),
('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('public/**', 'ignore', None)]},
zip_safe=False,
paster_plugins=['PasteScript', 'Pylons'],
entry_points="""
[paste.app_factory]
main = kai.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
""",
)
|
<commit_before>try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='kai',
version='0.1',
description='',
author='Ben Bangert',
author_email='ben@groovie.org',
install_requires=[
"Pylons>=0.9.7rc4", "CouchDB>=0.4", "python-openid>=2.2.1",
"pytz>=2008i", "Babel>=0.9.4", "tw.forms==0.9.3", "docutils>=0.5",
"PyXML>=0.8.4", "cssutils>=0.9.6a0", "Pygments>=1.0",
],
setup_requires=["PasteScript>=1.6.3"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'kai': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'kai': [
('**.py', 'python', None),
('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('public/**', 'ignore', None)]},
zip_safe=False,
paster_plugins=['PasteScript', 'Pylons'],
entry_points="""
[paste.app_factory]
main = kai.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
""",
)
<commit_msg>Update to latest Pylons ver<commit_after>
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='kai',
version='0.1',
description='',
author='Ben Bangert',
author_email='ben@groovie.org',
install_requires=[
"Pylons>=0.9.7", "CouchDB>=0.4", "python-openid>=2.2.1",
"pytz>=2008i", "Babel>=0.9.4", "tw.forms==0.9.3", "docutils>=0.5",
"PyXML>=0.8.4", "cssutils>=0.9.6a0", "Pygments>=1.0",
],
setup_requires=["PasteScript>=1.6.3"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'kai': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'kai': [
('**.py', 'python', None),
('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('public/**', 'ignore', None)]},
zip_safe=False,
paster_plugins=['PasteScript', 'Pylons'],
entry_points="""
[paste.app_factory]
main = kai.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
""",
)
|
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='kai',
version='0.1',
description='',
author='Ben Bangert',
author_email='ben@groovie.org',
install_requires=[
"Pylons>=0.9.7rc4", "CouchDB>=0.4", "python-openid>=2.2.1",
"pytz>=2008i", "Babel>=0.9.4", "tw.forms==0.9.3", "docutils>=0.5",
"PyXML>=0.8.4", "cssutils>=0.9.6a0", "Pygments>=1.0",
],
setup_requires=["PasteScript>=1.6.3"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'kai': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'kai': [
('**.py', 'python', None),
('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('public/**', 'ignore', None)]},
zip_safe=False,
paster_plugins=['PasteScript', 'Pylons'],
entry_points="""
[paste.app_factory]
main = kai.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
""",
)
Update to latest Pylons ver
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='kai',
version='0.1',
description='',
author='Ben Bangert',
author_email='ben@groovie.org',
install_requires=[
"Pylons>=0.9.7", "CouchDB>=0.4", "python-openid>=2.2.1",
"pytz>=2008i", "Babel>=0.9.4", "tw.forms==0.9.3", "docutils>=0.5",
"PyXML>=0.8.4", "cssutils>=0.9.6a0", "Pygments>=1.0",
],
setup_requires=["PasteScript>=1.6.3"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'kai': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'kai': [
('**.py', 'python', None),
('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('public/**', 'ignore', None)]},
zip_safe=False,
paster_plugins=['PasteScript', 'Pylons'],
entry_points="""
[paste.app_factory]
main = kai.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
""",
)
|
<commit_before>try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='kai',
version='0.1',
description='',
author='Ben Bangert',
author_email='ben@groovie.org',
install_requires=[
"Pylons>=0.9.7rc4", "CouchDB>=0.4", "python-openid>=2.2.1",
"pytz>=2008i", "Babel>=0.9.4", "tw.forms==0.9.3", "docutils>=0.5",
"PyXML>=0.8.4", "cssutils>=0.9.6a0", "Pygments>=1.0",
],
setup_requires=["PasteScript>=1.6.3"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'kai': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'kai': [
('**.py', 'python', None),
('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('public/**', 'ignore', None)]},
zip_safe=False,
paster_plugins=['PasteScript', 'Pylons'],
entry_points="""
[paste.app_factory]
main = kai.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
""",
)
<commit_msg>Update to latest Pylons ver<commit_after>try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name='kai',
version='0.1',
description='',
author='Ben Bangert',
author_email='ben@groovie.org',
install_requires=[
"Pylons>=0.9.7", "CouchDB>=0.4", "python-openid>=2.2.1",
"pytz>=2008i", "Babel>=0.9.4", "tw.forms==0.9.3", "docutils>=0.5",
"PyXML>=0.8.4", "cssutils>=0.9.6a0", "Pygments>=1.0",
],
setup_requires=["PasteScript>=1.6.3"],
packages=find_packages(exclude=['ez_setup']),
include_package_data=True,
test_suite='nose.collector',
package_data={'kai': ['i18n/*/LC_MESSAGES/*.mo']},
message_extractors = {'kai': [
('**.py', 'python', None),
('templates/**.mako', 'mako', {'input_encoding': 'utf-8'}),
('public/**', 'ignore', None)]},
zip_safe=False,
paster_plugins=['PasteScript', 'Pylons'],
entry_points="""
[paste.app_factory]
main = kai.config.middleware:make_app
[paste.app_install]
main = pylons.util:PylonsInstaller
""",
)
|
4182b5edbc635842429e77cf8bb1b565f2ca6e31
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
description = 'Generate version strings based on semantic versioning rules.'
long_description = str(open('README.rst', 'rb').read())
setup(name='versionbump',
version='1.0.0',
license='MIT',
author='Fabian Kochem',
url='https://github.com/vortec/versionbump',
description=description,
long_description=long_description,
packages=find_packages(),
install_requires=[
],
entry_points={
'console_scripts': [
'versionbump = versionbump.command_line:main',
]
},
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
),
)
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
description = 'Generate version strings based on semantic versioning rules.'
long_description = str(open('README.rst', 'rb').read())
setup(name='versionbump',
version='1.1.0',
license='MIT',
author='Fabian Kochem',
url='https://github.com/vortec/versionbump',
description=description,
long_description=long_description,
packages=find_packages(),
install_requires=[
],
entry_points={
'console_scripts': [
'versionbump = versionbump.command_line:main',
]
},
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
),
)
|
Bump version: 1.0.0 -> 1.1.0
|
Bump version: 1.0.0 -> 1.1.0
|
Python
|
mit
|
vortec/versionbump
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
description = 'Generate version strings based on semantic versioning rules.'
long_description = str(open('README.rst', 'rb').read())
setup(name='versionbump',
version='1.0.0',
license='MIT',
author='Fabian Kochem',
url='https://github.com/vortec/versionbump',
description=description,
long_description=long_description,
packages=find_packages(),
install_requires=[
],
entry_points={
'console_scripts': [
'versionbump = versionbump.command_line:main',
]
},
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
),
)
Bump version: 1.0.0 -> 1.1.0
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
description = 'Generate version strings based on semantic versioning rules.'
long_description = str(open('README.rst', 'rb').read())
setup(name='versionbump',
version='1.1.0',
license='MIT',
author='Fabian Kochem',
url='https://github.com/vortec/versionbump',
description=description,
long_description=long_description,
packages=find_packages(),
install_requires=[
],
entry_points={
'console_scripts': [
'versionbump = versionbump.command_line:main',
]
},
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
),
)
|
<commit_before># -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
description = 'Generate version strings based on semantic versioning rules.'
long_description = str(open('README.rst', 'rb').read())
setup(name='versionbump',
version='1.0.0',
license='MIT',
author='Fabian Kochem',
url='https://github.com/vortec/versionbump',
description=description,
long_description=long_description,
packages=find_packages(),
install_requires=[
],
entry_points={
'console_scripts': [
'versionbump = versionbump.command_line:main',
]
},
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
),
)
<commit_msg>Bump version: 1.0.0 -> 1.1.0<commit_after>
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
description = 'Generate version strings based on semantic versioning rules.'
long_description = str(open('README.rst', 'rb').read())
setup(name='versionbump',
version='1.1.0',
license='MIT',
author='Fabian Kochem',
url='https://github.com/vortec/versionbump',
description=description,
long_description=long_description,
packages=find_packages(),
install_requires=[
],
entry_points={
'console_scripts': [
'versionbump = versionbump.command_line:main',
]
},
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
),
)
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
description = 'Generate version strings based on semantic versioning rules.'
long_description = str(open('README.rst', 'rb').read())
setup(name='versionbump',
version='1.0.0',
license='MIT',
author='Fabian Kochem',
url='https://github.com/vortec/versionbump',
description=description,
long_description=long_description,
packages=find_packages(),
install_requires=[
],
entry_points={
'console_scripts': [
'versionbump = versionbump.command_line:main',
]
},
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
),
)
Bump version: 1.0.0 -> 1.1.0
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
description = 'Generate version strings based on semantic versioning rules.'
long_description = str(open('README.rst', 'rb').read())
setup(name='versionbump',
version='1.1.0',
license='MIT',
author='Fabian Kochem',
url='https://github.com/vortec/versionbump',
description=description,
long_description=long_description,
packages=find_packages(),
install_requires=[
],
entry_points={
'console_scripts': [
'versionbump = versionbump.command_line:main',
]
},
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
),
)
|
<commit_before># -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
description = 'Generate version strings based on semantic versioning rules.'
long_description = str(open('README.rst', 'rb').read())
setup(name='versionbump',
version='1.0.0',
license='MIT',
author='Fabian Kochem',
url='https://github.com/vortec/versionbump',
description=description,
long_description=long_description,
packages=find_packages(),
install_requires=[
],
entry_points={
'console_scripts': [
'versionbump = versionbump.command_line:main',
]
},
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
),
)
<commit_msg>Bump version: 1.0.0 -> 1.1.0<commit_after># -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
description = 'Generate version strings based on semantic versioning rules.'
long_description = str(open('README.rst', 'rb').read())
setup(name='versionbump',
version='1.1.0',
license='MIT',
author='Fabian Kochem',
url='https://github.com/vortec/versionbump',
description=description,
long_description=long_description,
packages=find_packages(),
install_requires=[
],
entry_points={
'console_scripts': [
'versionbump = versionbump.command_line:main',
]
},
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Utilities'
),
)
|
702f331cc70ee989d7542c81d85fc3dccbf550a0
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
# WARNING! WIP, doesn't work correctly.
# Still needs to understand the assets folder and make an executable out of __main__
from setuptools import setup, find_packages
# We want to restrict newer versions while we deal with upstream breaking changes.
discordpy_version = '==0.11.0'
# TODO read README(.rst? .md looks bad on pypi) for long_description.
# Could use pandoc, but the end user shouldn't need to do this in setup.
# Alt. could have package-specific description. More error-prone though.
setup(
# More permanent entries
name='crabbot',
author='TAOTheCrab',
url='https://github.com/TAOTheCrab/CrabBot',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License'
'Programming Language :: Python :: 3.5'
],
# Entries likely to be modified
description='A simple Discord bot',
version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated.
packages=find_packages(), # A little lazy
install_requires=[
'discord.py{}'.format(discordpy_version)
],
extras_require={
'voice': [
'discord.py[voice]{}'.format(discordpy_version),
'youtube_dl'
]
}
# scripts=['__main__.py']
)
|
#!/usr/bin/env python3
# WARNING! WIP, doesn't work correctly.
# Still needs to understand the assets folder and make an executable out of __main__
from setuptools import setup, find_packages
# TODO read README(.rst? .md looks bad on pypi) for long_description.
# Could use pandoc, but the end user shouldn't need to do this in setup.
# Alt. could have package-specific description. More error-prone though.
setup(
# More permanent entries
name='crabbot',
author='TAOTheCrab',
url='https://github.com/TAOTheCrab/CrabBot',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License'
'Programming Language :: Python :: 3.5'
],
# Entries likely to be modified
description='A simple Discord bot',
version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated.
packages=find_packages(), # A little lazy
install_requires=[
'discord.py{}'.format(discordpy_version)
],
extras_require={
'voice': [
'discord.py[voice]',
'youtube_dl'
]
}
# scripts=['__main__.py']
)
|
Remove outdated discord.py version requirement
|
Remove outdated discord.py version requirement
We'll come to incompatible versions when we get there. So far I've just
been updating discord.py without issue.
|
Python
|
mit
|
TAOTheCrab/CrabBot
|
#!/usr/bin/env python3
# WARNING! WIP, doesn't work correctly.
# Still needs to understand the assets folder and make an executable out of __main__
from setuptools import setup, find_packages
# We want to restrict newer versions while we deal with upstream breaking changes.
discordpy_version = '==0.11.0'
# TODO read README(.rst? .md looks bad on pypi) for long_description.
# Could use pandoc, but the end user shouldn't need to do this in setup.
# Alt. could have package-specific description. More error-prone though.
setup(
# More permanent entries
name='crabbot',
author='TAOTheCrab',
url='https://github.com/TAOTheCrab/CrabBot',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License'
'Programming Language :: Python :: 3.5'
],
# Entries likely to be modified
description='A simple Discord bot',
version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated.
packages=find_packages(), # A little lazy
install_requires=[
'discord.py{}'.format(discordpy_version)
],
extras_require={
'voice': [
'discord.py[voice]{}'.format(discordpy_version),
'youtube_dl'
]
}
# scripts=['__main__.py']
)
Remove outdated discord.py version requirement
We'll come to incompatible versions when we get there. So far I've just
been updating discord.py without issue.
|
#!/usr/bin/env python3
# WARNING! WIP, doesn't work correctly.
# Still needs to understand the assets folder and make an executable out of __main__
from setuptools import setup, find_packages
# TODO read README(.rst? .md looks bad on pypi) for long_description.
# Could use pandoc, but the end user shouldn't need to do this in setup.
# Alt. could have package-specific description. More error-prone though.
setup(
# More permanent entries
name='crabbot',
author='TAOTheCrab',
url='https://github.com/TAOTheCrab/CrabBot',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License'
'Programming Language :: Python :: 3.5'
],
# Entries likely to be modified
description='A simple Discord bot',
version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated.
packages=find_packages(), # A little lazy
install_requires=[
'discord.py{}'.format(discordpy_version)
],
extras_require={
'voice': [
'discord.py[voice]',
'youtube_dl'
]
}
# scripts=['__main__.py']
)
|
<commit_before>#!/usr/bin/env python3
# WARNING! WIP, doesn't work correctly.
# Still needs to understand the assets folder and make an executable out of __main__
from setuptools import setup, find_packages
# We want to restrict newer versions while we deal with upstream breaking changes.
discordpy_version = '==0.11.0'
# TODO read README(.rst? .md looks bad on pypi) for long_description.
# Could use pandoc, but the end user shouldn't need to do this in setup.
# Alt. could have package-specific description. More error-prone though.
setup(
# More permanent entries
name='crabbot',
author='TAOTheCrab',
url='https://github.com/TAOTheCrab/CrabBot',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License'
'Programming Language :: Python :: 3.5'
],
# Entries likely to be modified
description='A simple Discord bot',
version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated.
packages=find_packages(), # A little lazy
install_requires=[
'discord.py{}'.format(discordpy_version)
],
extras_require={
'voice': [
'discord.py[voice]{}'.format(discordpy_version),
'youtube_dl'
]
}
# scripts=['__main__.py']
)
<commit_msg>Remove outdated discord.py version requirement
We'll come to incompatible versions when we get there. So far I've just
been updating discord.py without issue.<commit_after>
|
#!/usr/bin/env python3
# WARNING! WIP, doesn't work correctly.
# Still needs to understand the assets folder and make an executable out of __main__
from setuptools import setup, find_packages
# TODO read README(.rst? .md looks bad on pypi) for long_description.
# Could use pandoc, but the end user shouldn't need to do this in setup.
# Alt. could have package-specific description. More error-prone though.
setup(
# More permanent entries
name='crabbot',
author='TAOTheCrab',
url='https://github.com/TAOTheCrab/CrabBot',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License'
'Programming Language :: Python :: 3.5'
],
# Entries likely to be modified
description='A simple Discord bot',
version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated.
packages=find_packages(), # A little lazy
install_requires=[
'discord.py{}'.format(discordpy_version)
],
extras_require={
'voice': [
'discord.py[voice]',
'youtube_dl'
]
}
# scripts=['__main__.py']
)
|
#!/usr/bin/env python3
# WARNING! WIP, doesn't work correctly.
# Still needs to understand the assets folder and make an executable out of __main__
from setuptools import setup, find_packages
# We want to restrict newer versions while we deal with upstream breaking changes.
discordpy_version = '==0.11.0'
# TODO read README(.rst? .md looks bad on pypi) for long_description.
# Could use pandoc, but the end user shouldn't need to do this in setup.
# Alt. could have package-specific description. More error-prone though.
setup(
# More permanent entries
name='crabbot',
author='TAOTheCrab',
url='https://github.com/TAOTheCrab/CrabBot',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License'
'Programming Language :: Python :: 3.5'
],
# Entries likely to be modified
description='A simple Discord bot',
version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated.
packages=find_packages(), # A little lazy
install_requires=[
'discord.py{}'.format(discordpy_version)
],
extras_require={
'voice': [
'discord.py[voice]{}'.format(discordpy_version),
'youtube_dl'
]
}
# scripts=['__main__.py']
)
Remove outdated discord.py version requirement
We'll come to incompatible versions when we get there. So far I've just
been updating discord.py without issue.
#!/usr/bin/env python3
# WARNING! WIP, doesn't work correctly.
# Still needs to understand the assets folder and make an executable out of __main__
from setuptools import setup, find_packages
# TODO read README(.rst? .md looks bad on pypi) for long_description.
# Could use pandoc, but the end user shouldn't need to do this in setup.
# Alt. could have package-specific description. More error-prone though.
setup(
# More permanent entries
name='crabbot',
author='TAOTheCrab',
url='https://github.com/TAOTheCrab/CrabBot',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License'
'Programming Language :: Python :: 3.5'
],
# Entries likely to be modified
description='A simple Discord bot',
version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated.
packages=find_packages(), # A little lazy
install_requires=[
'discord.py{}'.format(discordpy_version)
],
extras_require={
'voice': [
'discord.py[voice]',
'youtube_dl'
]
}
# scripts=['__main__.py']
)
|
<commit_before>#!/usr/bin/env python3
# WARNING! WIP, doesn't work correctly.
# Still needs to understand the assets folder and make an executable out of __main__
from setuptools import setup, find_packages
# We want to restrict newer versions while we deal with upstream breaking changes.
discordpy_version = '==0.11.0'
# TODO read README(.rst? .md looks bad on pypi) for long_description.
# Could use pandoc, but the end user shouldn't need to do this in setup.
# Alt. could have package-specific description. More error-prone though.
setup(
# More permanent entries
name='crabbot',
author='TAOTheCrab',
url='https://github.com/TAOTheCrab/CrabBot',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License'
'Programming Language :: Python :: 3.5'
],
# Entries likely to be modified
description='A simple Discord bot',
version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated.
packages=find_packages(), # A little lazy
install_requires=[
'discord.py{}'.format(discordpy_version)
],
extras_require={
'voice': [
'discord.py[voice]{}'.format(discordpy_version),
'youtube_dl'
]
}
# scripts=['__main__.py']
)
<commit_msg>Remove outdated discord.py version requirement
We'll come to incompatible versions when we get there. So far I've just
been updating discord.py without issue.<commit_after>#!/usr/bin/env python3
# WARNING! WIP, doesn't work correctly.
# Still needs to understand the assets folder and make an executable out of __main__
from setuptools import setup, find_packages
# TODO read README(.rst? .md looks bad on pypi) for long_description.
# Could use pandoc, but the end user shouldn't need to do this in setup.
# Alt. could have package-specific description. More error-prone though.
setup(
# More permanent entries
name='crabbot',
author='TAOTheCrab',
url='https://github.com/TAOTheCrab/CrabBot',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License'
'Programming Language :: Python :: 3.5'
],
# Entries likely to be modified
description='A simple Discord bot',
version='0.0.1', # TODO figure out a version scheme. Ensure this gets updated.
packages=find_packages(), # A little lazy
install_requires=[
'discord.py{}'.format(discordpy_version)
],
extras_require={
'voice': [
'discord.py[voice]',
'youtube_dl'
]
}
# scripts=['__main__.py']
)
|
fa55a1a93dd53023159c4a21963361d9678e52cf
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
packages=['envsettings'],
package_data={'': ['logging.json']},
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
packages=['envsettings'],
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
Remove reference to inexistent file.
|
Remove reference to inexistent file.
|
Python
|
isc
|
hobarrera/envsettings,hobarrera/envsettings
|
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
packages=['envsettings'],
package_data={'': ['logging.json']},
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
Remove reference to inexistent file.
|
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
packages=['envsettings'],
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
<commit_before>from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
packages=['envsettings'],
package_data={'': ['logging.json']},
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
<commit_msg>Remove reference to inexistent file.<commit_after>
|
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
packages=['envsettings'],
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
packages=['envsettings'],
package_data={'': ['logging.json']},
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
Remove reference to inexistent file.
from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
packages=['envsettings'],
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
<commit_before>from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
packages=['envsettings'],
package_data={'': ['logging.json']},
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
<commit_msg>Remove reference to inexistent file.<commit_after>from distutils.core import setup
setup(
name='pyenvsettings',
version='1.0.0',
author='Hugo Osvaldo Barrera',
author_email='hugo@barrera.io',
packages=['envsettings'],
url='https://github.com/hobarrera/envsettings',
license='ISC',
description="Read settings from environment variables."
)
|
255d561a68712ed1f40f673cbb1c428815a5febd
|
__init__.py
|
__init__.py
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "1.0.4"
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "2.4.0"
|
Make the distutils version number the same as the python version. It must be literally contained here, because it is still possible to install this distutils in older Python versions.
|
Make the distutils version number the same as the python version. It
must be literally contained here, because it is still possible to
install this distutils in older Python versions.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "1.0.4"
Make the distutils version number the same as the python version. It
must be literally contained here, because it is still possible to
install this distutils in older Python versions.
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "2.4.0"
|
<commit_before>"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "1.0.4"
<commit_msg>Make the distutils version number the same as the python version. It
must be literally contained here, because it is still possible to
install this distutils in older Python versions.<commit_after>
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "2.4.0"
|
"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "1.0.4"
Make the distutils version number the same as the python version. It
must be literally contained here, because it is still possible to
install this distutils in older Python versions."""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "2.4.0"
|
<commit_before>"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "1.0.4"
<commit_msg>Make the distutils version number the same as the python version. It
must be literally contained here, because it is still possible to
install this distutils in older Python versions.<commit_after>"""distutils
The main package for the Python Module Distribution Utilities. Normally
used from a setup script as
from distutils.core import setup
setup (...)
"""
# This module should be kept compatible with Python 1.5.2.
__revision__ = "$Id$"
__version__ = "2.4.0"
|
e1ad029fc8d9f34fc5fbbd5d2a12b9f6bd198bff
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
from distutils.core import setup
setup(name='visram',
version='0.1.0',
description='Graphical RAM/CPU Visualizer',
license='MIT',
author='Matthew Pfeiffer',
author_email='spferical@gmail.com',
url='http://github.com/Spferical/visram',
packages=['visram', 'visram.test'],
scripts=['bin/visram'],
platforms=['any'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: X11 Applications',
'Environment :: MacOS X :: Cocoa',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2 :: Only',
'Topic :: System :: Monitoring',
],
)
|
#!/usr/bin/env python2
from distutils.core import setup
setup(name='visram',
version='0.1.0',
description='Graphical RAM/CPU Visualizer',
license='MIT',
author='Matthew Pfeiffer',
author_email='spferical@gmail.com',
url='http://github.com/Spferical/visram',
packages=['visram', 'visram.test'],
scripts=['bin/visram'],
platforms=['any'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Environment :: MacOS X :: Cocoa',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2 :: Only',
'Topic :: System :: Monitoring',
],
)
|
Change development status to Alpha
|
Change development status to Alpha
It better reflects the project's immature state.
|
Python
|
mit
|
Spferical/visram
|
#!/usr/bin/env python2
from distutils.core import setup
setup(name='visram',
version='0.1.0',
description='Graphical RAM/CPU Visualizer',
license='MIT',
author='Matthew Pfeiffer',
author_email='spferical@gmail.com',
url='http://github.com/Spferical/visram',
packages=['visram', 'visram.test'],
scripts=['bin/visram'],
platforms=['any'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: X11 Applications',
'Environment :: MacOS X :: Cocoa',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2 :: Only',
'Topic :: System :: Monitoring',
],
)
Change development status to Alpha
It better reflects the project's immature state.
|
#!/usr/bin/env python2
from distutils.core import setup
setup(name='visram',
version='0.1.0',
description='Graphical RAM/CPU Visualizer',
license='MIT',
author='Matthew Pfeiffer',
author_email='spferical@gmail.com',
url='http://github.com/Spferical/visram',
packages=['visram', 'visram.test'],
scripts=['bin/visram'],
platforms=['any'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Environment :: MacOS X :: Cocoa',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2 :: Only',
'Topic :: System :: Monitoring',
],
)
|
<commit_before>#!/usr/bin/env python2
from distutils.core import setup
setup(name='visram',
version='0.1.0',
description='Graphical RAM/CPU Visualizer',
license='MIT',
author='Matthew Pfeiffer',
author_email='spferical@gmail.com',
url='http://github.com/Spferical/visram',
packages=['visram', 'visram.test'],
scripts=['bin/visram'],
platforms=['any'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: X11 Applications',
'Environment :: MacOS X :: Cocoa',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2 :: Only',
'Topic :: System :: Monitoring',
],
)
<commit_msg>Change development status to Alpha
It better reflects the project's immature state.<commit_after>
|
#!/usr/bin/env python2
from distutils.core import setup
setup(name='visram',
version='0.1.0',
description='Graphical RAM/CPU Visualizer',
license='MIT',
author='Matthew Pfeiffer',
author_email='spferical@gmail.com',
url='http://github.com/Spferical/visram',
packages=['visram', 'visram.test'],
scripts=['bin/visram'],
platforms=['any'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Environment :: MacOS X :: Cocoa',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2 :: Only',
'Topic :: System :: Monitoring',
],
)
|
#!/usr/bin/env python2
from distutils.core import setup
setup(name='visram',
version='0.1.0',
description='Graphical RAM/CPU Visualizer',
license='MIT',
author='Matthew Pfeiffer',
author_email='spferical@gmail.com',
url='http://github.com/Spferical/visram',
packages=['visram', 'visram.test'],
scripts=['bin/visram'],
platforms=['any'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: X11 Applications',
'Environment :: MacOS X :: Cocoa',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2 :: Only',
'Topic :: System :: Monitoring',
],
)
Change development status to Alpha
It better reflects the project's immature state.
#!/usr/bin/env python2
from distutils.core import setup
setup(name='visram',
version='0.1.0',
description='Graphical RAM/CPU Visualizer',
license='MIT',
author='Matthew Pfeiffer',
author_email='spferical@gmail.com',
url='http://github.com/Spferical/visram',
packages=['visram', 'visram.test'],
scripts=['bin/visram'],
platforms=['any'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Environment :: MacOS X :: Cocoa',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2 :: Only',
'Topic :: System :: Monitoring',
],
)
|
<commit_before>#!/usr/bin/env python2
from distutils.core import setup
setup(name='visram',
version='0.1.0',
description='Graphical RAM/CPU Visualizer',
license='MIT',
author='Matthew Pfeiffer',
author_email='spferical@gmail.com',
url='http://github.com/Spferical/visram',
packages=['visram', 'visram.test'],
scripts=['bin/visram'],
platforms=['any'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: X11 Applications',
'Environment :: MacOS X :: Cocoa',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2 :: Only',
'Topic :: System :: Monitoring',
],
)
<commit_msg>Change development status to Alpha
It better reflects the project's immature state.<commit_after>#!/usr/bin/env python2
from distutils.core import setup
setup(name='visram',
version='0.1.0',
description='Graphical RAM/CPU Visualizer',
license='MIT',
author='Matthew Pfeiffer',
author_email='spferical@gmail.com',
url='http://github.com/Spferical/visram',
packages=['visram', 'visram.test'],
scripts=['bin/visram'],
platforms=['any'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: X11 Applications',
'Environment :: MacOS X :: Cocoa',
'Environment :: Win32 (MS Windows)',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2 :: Only',
'Topic :: System :: Monitoring',
],
)
|
b890061942473f5ada953a7c33847937abdc36b0
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="UtilityBelt",
version="0.1",
description="Utilities to make you a CND Batman",
url="https://github.com/sroberts/utilitybelt",
license="MIT",
packages=find_packages(),
install_requires=['requests', 'GeoIP']
)
|
from setuptools import setup, find_packages
setup(
name="UtilityBelt",
version="0.1",
description="Utilities to make you a CND Batman",
url="https://github.com/sroberts/utilitybelt",
license="MIT",
packages=find_packages(),
package_data={'utilitybelt': ['data/GeoLiteCity.dat']},
install_requires=['requests', 'GeoIP']
)
|
Add Geolite data to package
|
Add Geolite data to package
|
Python
|
mit
|
yolothreat/utilitybelt,yolothreat/utilitybelt
|
from setuptools import setup, find_packages
setup(
name="UtilityBelt",
version="0.1",
description="Utilities to make you a CND Batman",
url="https://github.com/sroberts/utilitybelt",
license="MIT",
packages=find_packages(),
install_requires=['requests', 'GeoIP']
)
Add Geolite data to package
|
from setuptools import setup, find_packages
setup(
name="UtilityBelt",
version="0.1",
description="Utilities to make you a CND Batman",
url="https://github.com/sroberts/utilitybelt",
license="MIT",
packages=find_packages(),
package_data={'utilitybelt': ['data/GeoLiteCity.dat']},
install_requires=['requests', 'GeoIP']
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="UtilityBelt",
version="0.1",
description="Utilities to make you a CND Batman",
url="https://github.com/sroberts/utilitybelt",
license="MIT",
packages=find_packages(),
install_requires=['requests', 'GeoIP']
)
<commit_msg>Add Geolite data to package<commit_after>
|
from setuptools import setup, find_packages
setup(
name="UtilityBelt",
version="0.1",
description="Utilities to make you a CND Batman",
url="https://github.com/sroberts/utilitybelt",
license="MIT",
packages=find_packages(),
package_data={'utilitybelt': ['data/GeoLiteCity.dat']},
install_requires=['requests', 'GeoIP']
)
|
from setuptools import setup, find_packages
setup(
name="UtilityBelt",
version="0.1",
description="Utilities to make you a CND Batman",
url="https://github.com/sroberts/utilitybelt",
license="MIT",
packages=find_packages(),
install_requires=['requests', 'GeoIP']
)
Add Geolite data to package
from setuptools import setup, find_packages
setup(
name="UtilityBelt",
version="0.1",
description="Utilities to make you a CND Batman",
url="https://github.com/sroberts/utilitybelt",
license="MIT",
packages=find_packages(),
package_data={'utilitybelt': ['data/GeoLiteCity.dat']},
install_requires=['requests', 'GeoIP']
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="UtilityBelt",
version="0.1",
description="Utilities to make you a CND Batman",
url="https://github.com/sroberts/utilitybelt",
license="MIT",
packages=find_packages(),
install_requires=['requests', 'GeoIP']
)
<commit_msg>Add Geolite data to package<commit_after>from setuptools import setup, find_packages
setup(
name="UtilityBelt",
version="0.1",
description="Utilities to make you a CND Batman",
url="https://github.com/sroberts/utilitybelt",
license="MIT",
packages=find_packages(),
package_data={'utilitybelt': ['data/GeoLiteCity.dat']},
install_requires=['requests', 'GeoIP']
)
|
6c1c38a9c293527bfb4bb5689675f0ef6b385f75
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='kotti_contactform',
version=version,
description="Simple contact form for Kotti sites",
long_description="""\
This is an extension to Kotti that allows to add simple contact forms to your website.""",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: BSD License",
],
keywords='kotti contact form',
author='Christian Neumann',
author_email='christian@datenkarussell.de',
url='http://pypi.python.org/pypi/kotti_contactform',
license='BSD License',
packages=['kotti_contactform'],
package_data={'kotti_contactform': ['templates/*.pt']},
include_package_data=True,
zip_safe=False,
install_requires=[
'Kotti',
'pyramid_mailer',
'Babel',
],
entry_points="""
# -*- Entry points: -*-
""",
message_extractors = { "kotti_contactform": [
("**.py", "chameleon_python", None ),
("**.pt", "chameleon_xml", None ),
]},
)
|
from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='kotti_contactform',
version=version,
description="Simple contact form for Kotti sites",
long_description="""\
This is an extension to Kotti that allows to add simple contact forms to your website.""",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: BSD License",
],
keywords='kotti contact form',
author='Christian Neumann',
author_email='christian@datenkarussell.de',
url='http://pypi.python.org/pypi/kotti_contactform',
license='BSD License',
packages=['kotti_contactform'],
package_data={'kotti_contactform': ['templates/*.pt',
'locale/*.*',
'locale/*/LC_MESSAGES/*.*']},
include_package_data=True,
zip_safe=False,
install_requires=[
'Kotti',
'pyramid_mailer',
'Babel',
],
entry_points="""
# -*- Entry points: -*-
""",
message_extractors = { "kotti_contactform": [
("**.py", "chameleon_python", None ),
("**.pt", "chameleon_xml", None ),
]},
)
|
Add translation files to package data
|
Add translation files to package data
|
Python
|
bsd-2-clause
|
Kotti/kotti_contactform
|
from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='kotti_contactform',
version=version,
description="Simple contact form for Kotti sites",
long_description="""\
This is an extension to Kotti that allows to add simple contact forms to your website.""",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: BSD License",
],
keywords='kotti contact form',
author='Christian Neumann',
author_email='christian@datenkarussell.de',
url='http://pypi.python.org/pypi/kotti_contactform',
license='BSD License',
packages=['kotti_contactform'],
package_data={'kotti_contactform': ['templates/*.pt']},
include_package_data=True,
zip_safe=False,
install_requires=[
'Kotti',
'pyramid_mailer',
'Babel',
],
entry_points="""
# -*- Entry points: -*-
""",
message_extractors = { "kotti_contactform": [
("**.py", "chameleon_python", None ),
("**.pt", "chameleon_xml", None ),
]},
)
Add translation files to package data
|
from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='kotti_contactform',
version=version,
description="Simple contact form for Kotti sites",
long_description="""\
This is an extension to Kotti that allows to add simple contact forms to your website.""",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: BSD License",
],
keywords='kotti contact form',
author='Christian Neumann',
author_email='christian@datenkarussell.de',
url='http://pypi.python.org/pypi/kotti_contactform',
license='BSD License',
packages=['kotti_contactform'],
package_data={'kotti_contactform': ['templates/*.pt',
'locale/*.*',
'locale/*/LC_MESSAGES/*.*']},
include_package_data=True,
zip_safe=False,
install_requires=[
'Kotti',
'pyramid_mailer',
'Babel',
],
entry_points="""
# -*- Entry points: -*-
""",
message_extractors = { "kotti_contactform": [
("**.py", "chameleon_python", None ),
("**.pt", "chameleon_xml", None ),
]},
)
|
<commit_before>from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='kotti_contactform',
version=version,
description="Simple contact form for Kotti sites",
long_description="""\
This is an extension to Kotti that allows to add simple contact forms to your website.""",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: BSD License",
],
keywords='kotti contact form',
author='Christian Neumann',
author_email='christian@datenkarussell.de',
url='http://pypi.python.org/pypi/kotti_contactform',
license='BSD License',
packages=['kotti_contactform'],
package_data={'kotti_contactform': ['templates/*.pt']},
include_package_data=True,
zip_safe=False,
install_requires=[
'Kotti',
'pyramid_mailer',
'Babel',
],
entry_points="""
# -*- Entry points: -*-
""",
message_extractors = { "kotti_contactform": [
("**.py", "chameleon_python", None ),
("**.pt", "chameleon_xml", None ),
]},
)
<commit_msg>Add translation files to package data<commit_after>
|
from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='kotti_contactform',
version=version,
description="Simple contact form for Kotti sites",
long_description="""\
This is an extension to Kotti that allows to add simple contact forms to your website.""",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: BSD License",
],
keywords='kotti contact form',
author='Christian Neumann',
author_email='christian@datenkarussell.de',
url='http://pypi.python.org/pypi/kotti_contactform',
license='BSD License',
packages=['kotti_contactform'],
package_data={'kotti_contactform': ['templates/*.pt',
'locale/*.*',
'locale/*/LC_MESSAGES/*.*']},
include_package_data=True,
zip_safe=False,
install_requires=[
'Kotti',
'pyramid_mailer',
'Babel',
],
entry_points="""
# -*- Entry points: -*-
""",
message_extractors = { "kotti_contactform": [
("**.py", "chameleon_python", None ),
("**.pt", "chameleon_xml", None ),
]},
)
|
from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='kotti_contactform',
version=version,
description="Simple contact form for Kotti sites",
long_description="""\
This is an extension to Kotti that allows to add simple contact forms to your website.""",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: BSD License",
],
keywords='kotti contact form',
author='Christian Neumann',
author_email='christian@datenkarussell.de',
url='http://pypi.python.org/pypi/kotti_contactform',
license='BSD License',
packages=['kotti_contactform'],
package_data={'kotti_contactform': ['templates/*.pt']},
include_package_data=True,
zip_safe=False,
install_requires=[
'Kotti',
'pyramid_mailer',
'Babel',
],
entry_points="""
# -*- Entry points: -*-
""",
message_extractors = { "kotti_contactform": [
("**.py", "chameleon_python", None ),
("**.pt", "chameleon_xml", None ),
]},
)
Add translation files to package data
from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='kotti_contactform',
version=version,
description="Simple contact form for Kotti sites",
long_description="""\
This is an extension to Kotti that allows to add simple contact forms to your website.""",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: BSD License",
],
keywords='kotti contact form',
author='Christian Neumann',
author_email='christian@datenkarussell.de',
url='http://pypi.python.org/pypi/kotti_contactform',
license='BSD License',
packages=['kotti_contactform'],
package_data={'kotti_contactform': ['templates/*.pt',
'locale/*.*',
'locale/*/LC_MESSAGES/*.*']},
include_package_data=True,
zip_safe=False,
install_requires=[
'Kotti',
'pyramid_mailer',
'Babel',
],
entry_points="""
# -*- Entry points: -*-
""",
message_extractors = { "kotti_contactform": [
("**.py", "chameleon_python", None ),
("**.pt", "chameleon_xml", None ),
]},
)
|
<commit_before>from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='kotti_contactform',
version=version,
description="Simple contact form for Kotti sites",
long_description="""\
This is an extension to Kotti that allows to add simple contact forms to your website.""",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: BSD License",
],
keywords='kotti contact form',
author='Christian Neumann',
author_email='christian@datenkarussell.de',
url='http://pypi.python.org/pypi/kotti_contactform',
license='BSD License',
packages=['kotti_contactform'],
package_data={'kotti_contactform': ['templates/*.pt']},
include_package_data=True,
zip_safe=False,
install_requires=[
'Kotti',
'pyramid_mailer',
'Babel',
],
entry_points="""
# -*- Entry points: -*-
""",
message_extractors = { "kotti_contactform": [
("**.py", "chameleon_python", None ),
("**.pt", "chameleon_xml", None ),
]},
)
<commit_msg>Add translation files to package data<commit_after>from setuptools import setup, find_packages
import sys, os
version = '0.1'
setup(name='kotti_contactform',
version=version,
description="Simple contact form for Kotti sites",
long_description="""\
This is an extension to Kotti that allows to add simple contact forms to your website.""",
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python",
"Framework :: Pylons",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"License :: OSI Approved :: BSD License",
],
keywords='kotti contact form',
author='Christian Neumann',
author_email='christian@datenkarussell.de',
url='http://pypi.python.org/pypi/kotti_contactform',
license='BSD License',
packages=['kotti_contactform'],
package_data={'kotti_contactform': ['templates/*.pt',
'locale/*.*',
'locale/*/LC_MESSAGES/*.*']},
include_package_data=True,
zip_safe=False,
install_requires=[
'Kotti',
'pyramid_mailer',
'Babel',
],
entry_points="""
# -*- Entry points: -*-
""",
message_extractors = { "kotti_contactform": [
("**.py", "chameleon_python", None ),
("**.pt", "chameleon_xml", None ),
]},
)
|
1b992df4b7e8a36a5836b05217861cb1a7c62f8b
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0']
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0'],
extras_require={'doc': ['sphinx', 'sphinx-rtd-theme', 'recommonmark']}
)
|
Add optional dependencies for the docs
|
Add optional dependencies for the docs
|
Python
|
apache-2.0
|
DLTK/DLTK
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0']
)
Add optional dependencies for the docs
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0'],
extras_require={'doc': ['sphinx', 'sphinx-rtd-theme', 'recommonmark']}
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0']
)
<commit_msg>Add optional dependencies for the docs<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0'],
extras_require={'doc': ['sphinx', 'sphinx-rtd-theme', 'recommonmark']}
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0']
)
Add optional dependencies for the docs
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0'],
extras_require={'doc': ['sphinx', 'sphinx-rtd-theme', 'recommonmark']}
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0']
)
<commit_msg>Add optional dependencies for the docs<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='dltk',
version='0.1',
description='Deep Learning Toolkit for Medical Image Analysis',
author='DLTK Contributors',
url='https://dltk.github.io',
packages=find_packages(exclude=['_docs', 'contrib', 'data', 'examples']),
keywords='machine learning tensorflow deep learning biomedical imaging',
license='Apache License 2.0',
classifiers=['Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'],
install_requires=['numpy>=1.12.1', 'scipy>=0.19.0', 'pandas>=0.19.0', 'matplotlib>=1.5.3',
'scikit-image>=0.13.0', 'tensorflow-gpu>=1.1.0', 'SimpleITK>=1.0.0', 'jupyter>=1.0.0'],
extras_require={'doc': ['sphinx', 'sphinx-rtd-theme', 'recommonmark']}
)
|
e3c8e72341fea566113e510b058141c9ff75c0ea
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A LMTP server class',
'long_description': 'A LMTP counterpart to smtpd in the Python standard library',
'author': 'Matt Molyneaux',
'url': 'https://github.com/moggers87/lmtpd',
'download_url': 'http://pypi.python.org/pypi/lmtpd',
'author_email': 'moggers87+git@moggers87.co.uk',
'version': '6.1.0',
'license': 'MIT', # apparently nothing searches classifiers :(
'packages': ['lmtpd'],
'data_files': [('share/lmtpd', ['LICENSE', 'PY-LIC'])],
'name': 'lmtpd',
'classifiers': [
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: Python Software Foundation License',
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Intended Audience :: Developers',
'Topic :: Communications :: Email'],
'test_suite': 'lmtpd.tests'
}
setup(**config)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A LMTP server class',
'long_description': 'A LMTP counterpart to smtpd in the Python standard library',
'author': 'Matt Molyneaux',
'url': 'https://github.com/moggers87/lmtpd',
'download_url': 'http://pypi.python.org/pypi/lmtpd',
'author_email': 'moggers87+git@moggers87.co.uk',
'version': '6.1.0',
'license': 'MIT', # apparently nothing searches classifiers :(
'packages': ['lmtpd'],
'data_files': [('share/lmtpd', ['LICENSE', 'PY-LIC'])],
'name': 'lmtpd',
'classifiers': [
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: Python Software Foundation License',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Intended Audience :: Developers',
'Topic :: Communications :: Email'],
'test_suite': 'lmtpd.tests'
}
setup(**config)
|
Mark package as a stable
|
Mark package as a stable
|
Python
|
mit
|
moggers87/lmtpd
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A LMTP server class',
'long_description': 'A LMTP counterpart to smtpd in the Python standard library',
'author': 'Matt Molyneaux',
'url': 'https://github.com/moggers87/lmtpd',
'download_url': 'http://pypi.python.org/pypi/lmtpd',
'author_email': 'moggers87+git@moggers87.co.uk',
'version': '6.1.0',
'license': 'MIT', # apparently nothing searches classifiers :(
'packages': ['lmtpd'],
'data_files': [('share/lmtpd', ['LICENSE', 'PY-LIC'])],
'name': 'lmtpd',
'classifiers': [
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: Python Software Foundation License',
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Intended Audience :: Developers',
'Topic :: Communications :: Email'],
'test_suite': 'lmtpd.tests'
}
setup(**config)
Mark package as a stable
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A LMTP server class',
'long_description': 'A LMTP counterpart to smtpd in the Python standard library',
'author': 'Matt Molyneaux',
'url': 'https://github.com/moggers87/lmtpd',
'download_url': 'http://pypi.python.org/pypi/lmtpd',
'author_email': 'moggers87+git@moggers87.co.uk',
'version': '6.1.0',
'license': 'MIT', # apparently nothing searches classifiers :(
'packages': ['lmtpd'],
'data_files': [('share/lmtpd', ['LICENSE', 'PY-LIC'])],
'name': 'lmtpd',
'classifiers': [
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: Python Software Foundation License',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Intended Audience :: Developers',
'Topic :: Communications :: Email'],
'test_suite': 'lmtpd.tests'
}
setup(**config)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A LMTP server class',
'long_description': 'A LMTP counterpart to smtpd in the Python standard library',
'author': 'Matt Molyneaux',
'url': 'https://github.com/moggers87/lmtpd',
'download_url': 'http://pypi.python.org/pypi/lmtpd',
'author_email': 'moggers87+git@moggers87.co.uk',
'version': '6.1.0',
'license': 'MIT', # apparently nothing searches classifiers :(
'packages': ['lmtpd'],
'data_files': [('share/lmtpd', ['LICENSE', 'PY-LIC'])],
'name': 'lmtpd',
'classifiers': [
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: Python Software Foundation License',
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Intended Audience :: Developers',
'Topic :: Communications :: Email'],
'test_suite': 'lmtpd.tests'
}
setup(**config)
<commit_msg>Mark package as a stable<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A LMTP server class',
'long_description': 'A LMTP counterpart to smtpd in the Python standard library',
'author': 'Matt Molyneaux',
'url': 'https://github.com/moggers87/lmtpd',
'download_url': 'http://pypi.python.org/pypi/lmtpd',
'author_email': 'moggers87+git@moggers87.co.uk',
'version': '6.1.0',
'license': 'MIT', # apparently nothing searches classifiers :(
'packages': ['lmtpd'],
'data_files': [('share/lmtpd', ['LICENSE', 'PY-LIC'])],
'name': 'lmtpd',
'classifiers': [
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: Python Software Foundation License',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Intended Audience :: Developers',
'Topic :: Communications :: Email'],
'test_suite': 'lmtpd.tests'
}
setup(**config)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A LMTP server class',
'long_description': 'A LMTP counterpart to smtpd in the Python standard library',
'author': 'Matt Molyneaux',
'url': 'https://github.com/moggers87/lmtpd',
'download_url': 'http://pypi.python.org/pypi/lmtpd',
'author_email': 'moggers87+git@moggers87.co.uk',
'version': '6.1.0',
'license': 'MIT', # apparently nothing searches classifiers :(
'packages': ['lmtpd'],
'data_files': [('share/lmtpd', ['LICENSE', 'PY-LIC'])],
'name': 'lmtpd',
'classifiers': [
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: Python Software Foundation License',
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Intended Audience :: Developers',
'Topic :: Communications :: Email'],
'test_suite': 'lmtpd.tests'
}
setup(**config)
Mark package as a stable
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A LMTP server class',
'long_description': 'A LMTP counterpart to smtpd in the Python standard library',
'author': 'Matt Molyneaux',
'url': 'https://github.com/moggers87/lmtpd',
'download_url': 'http://pypi.python.org/pypi/lmtpd',
'author_email': 'moggers87+git@moggers87.co.uk',
'version': '6.1.0',
'license': 'MIT', # apparently nothing searches classifiers :(
'packages': ['lmtpd'],
'data_files': [('share/lmtpd', ['LICENSE', 'PY-LIC'])],
'name': 'lmtpd',
'classifiers': [
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: Python Software Foundation License',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Intended Audience :: Developers',
'Topic :: Communications :: Email'],
'test_suite': 'lmtpd.tests'
}
setup(**config)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A LMTP server class',
'long_description': 'A LMTP counterpart to smtpd in the Python standard library',
'author': 'Matt Molyneaux',
'url': 'https://github.com/moggers87/lmtpd',
'download_url': 'http://pypi.python.org/pypi/lmtpd',
'author_email': 'moggers87+git@moggers87.co.uk',
'version': '6.1.0',
'license': 'MIT', # apparently nothing searches classifiers :(
'packages': ['lmtpd'],
'data_files': [('share/lmtpd', ['LICENSE', 'PY-LIC'])],
'name': 'lmtpd',
'classifiers': [
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: Python Software Foundation License',
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Intended Audience :: Developers',
'Topic :: Communications :: Email'],
'test_suite': 'lmtpd.tests'
}
setup(**config)
<commit_msg>Mark package as a stable<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A LMTP server class',
'long_description': 'A LMTP counterpart to smtpd in the Python standard library',
'author': 'Matt Molyneaux',
'url': 'https://github.com/moggers87/lmtpd',
'download_url': 'http://pypi.python.org/pypi/lmtpd',
'author_email': 'moggers87+git@moggers87.co.uk',
'version': '6.1.0',
'license': 'MIT', # apparently nothing searches classifiers :(
'packages': ['lmtpd'],
'data_files': [('share/lmtpd', ['LICENSE', 'PY-LIC'])],
'name': 'lmtpd',
'classifiers': [
'License :: OSI Approved :: MIT License',
'License :: OSI Approved :: Python Software Foundation License',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Intended Audience :: Developers',
'Topic :: Communications :: Email'],
'test_suite': 'lmtpd.tests'
}
setup(**config)
|
c1423ff0782c08886b2ab46355ad6d94fc54ba19
|
setup.py
|
setup.py
|
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from setuptools import setup, find_packages
from hbmqtt.version import get_version
setup(
name="hbmqtt",
version=get_version(),
description="HBMQTT - HomeBrew MQTT\nclient/brocker using Python 3.4 asyncio library",
author="Nicolas Jouanin",
author_email='nico@beerfactory.org',
url="https://github.com/beerfactory/hbmqtt",
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=['transitions', 'blinker'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.4',
'Topic :: Communications',
'Topic :: Internet'
]
)
|
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from setuptools import setup, find_packages
from hbmqtt.version import get_version
setup(
name="hbmqtt",
version=get_version(),
description="HBMQTT - HomeBrew MQTT\nclient/brocker using Python 3.4 asyncio library",
author="Nicolas Jouanin",
author_email='nico@beerfactory.org',
url="https://github.com/beerfactory/hbmqtt",
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=[
'transitions==0.2.5',
'blinker',
'websockets'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.4',
'Topic :: Communications',
'Topic :: Internet'
]
)
|
Add missing requirements for websockets HBMQTT-25
|
Add missing requirements for websockets
HBMQTT-25
|
Python
|
mit
|
beerfactory/hbmqtt
|
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from setuptools import setup, find_packages
from hbmqtt.version import get_version
setup(
name="hbmqtt",
version=get_version(),
description="HBMQTT - HomeBrew MQTT\nclient/brocker using Python 3.4 asyncio library",
author="Nicolas Jouanin",
author_email='nico@beerfactory.org',
url="https://github.com/beerfactory/hbmqtt",
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=['transitions', 'blinker'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.4',
'Topic :: Communications',
'Topic :: Internet'
]
)
Add missing requirements for websockets
HBMQTT-25
|
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from setuptools import setup, find_packages
from hbmqtt.version import get_version
setup(
name="hbmqtt",
version=get_version(),
description="HBMQTT - HomeBrew MQTT\nclient/brocker using Python 3.4 asyncio library",
author="Nicolas Jouanin",
author_email='nico@beerfactory.org',
url="https://github.com/beerfactory/hbmqtt",
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=[
'transitions==0.2.5',
'blinker',
'websockets'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.4',
'Topic :: Communications',
'Topic :: Internet'
]
)
|
<commit_before># Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from setuptools import setup, find_packages
from hbmqtt.version import get_version
setup(
name="hbmqtt",
version=get_version(),
description="HBMQTT - HomeBrew MQTT\nclient/brocker using Python 3.4 asyncio library",
author="Nicolas Jouanin",
author_email='nico@beerfactory.org',
url="https://github.com/beerfactory/hbmqtt",
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=['transitions', 'blinker'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.4',
'Topic :: Communications',
'Topic :: Internet'
]
)<commit_msg>Add missing requirements for websockets
HBMQTT-25<commit_after>
|
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from setuptools import setup, find_packages
from hbmqtt.version import get_version
setup(
name="hbmqtt",
version=get_version(),
description="HBMQTT - HomeBrew MQTT\nclient/brocker using Python 3.4 asyncio library",
author="Nicolas Jouanin",
author_email='nico@beerfactory.org',
url="https://github.com/beerfactory/hbmqtt",
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=[
'transitions==0.2.5',
'blinker',
'websockets'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.4',
'Topic :: Communications',
'Topic :: Internet'
]
)
|
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from setuptools import setup, find_packages
from hbmqtt.version import get_version
setup(
name="hbmqtt",
version=get_version(),
description="HBMQTT - HomeBrew MQTT\nclient/brocker using Python 3.4 asyncio library",
author="Nicolas Jouanin",
author_email='nico@beerfactory.org',
url="https://github.com/beerfactory/hbmqtt",
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=['transitions', 'blinker'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.4',
'Topic :: Communications',
'Topic :: Internet'
]
)
Add missing requirements for websockets
HBMQTT-25
# Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from setuptools import setup, find_packages
from hbmqtt.version import get_version
setup(
name="hbmqtt",
version=get_version(),
description="HBMQTT - HomeBrew MQTT\nclient/brocker using Python 3.4 asyncio library",
author="Nicolas Jouanin",
author_email='nico@beerfactory.org',
url="https://github.com/beerfactory/hbmqtt",
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=[
'transitions==0.2.5',
'blinker',
'websockets'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.4',
'Topic :: Communications',
'Topic :: Internet'
]
)
|
<commit_before># Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from setuptools import setup, find_packages
from hbmqtt.version import get_version
setup(
name="hbmqtt",
version=get_version(),
description="HBMQTT - HomeBrew MQTT\nclient/brocker using Python 3.4 asyncio library",
author="Nicolas Jouanin",
author_email='nico@beerfactory.org',
url="https://github.com/beerfactory/hbmqtt",
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=['transitions', 'blinker'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.4',
'Topic :: Communications',
'Topic :: Internet'
]
)<commit_msg>Add missing requirements for websockets
HBMQTT-25<commit_after># Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
from setuptools import setup, find_packages
from hbmqtt.version import get_version
setup(
name="hbmqtt",
version=get_version(),
description="HBMQTT - HomeBrew MQTT\nclient/brocker using Python 3.4 asyncio library",
author="Nicolas Jouanin",
author_email='nico@beerfactory.org',
url="https://github.com/beerfactory/hbmqtt",
license='MIT',
packages=find_packages(exclude=['tests']),
install_requires=[
'transitions==0.2.5',
'blinker',
'websockets'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3.4',
'Topic :: Communications',
'Topic :: Internet'
]
)
|
469546a923aa4eceda787d468a6f4312594d45d0
|
setup.py
|
setup.py
|
###############################################################################
# Copyright 2015-2019 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from setuptools import setup, find_packages
VERSION="0.5.0"
setup(
name="nacculator",
version=VERSION,
author="Taeber Rapczak",
author_email="taeber@ufl.edu",
maintainer="UF CTS-IT",
maintainer_email="ctsit@ctsi.ufl.edu",
url="https://github.com/ctsit/nacculator",
license="BSD 2-Clause",
description="CSV to NACC's UDS3 format converter",
keywords=["REDCap", "NACC", "UDS", "Clinical data"],
download_url="https://github.com/ctsit/nacculator/releases/tag/" + VERSION,
package_dir = {'nacc': 'nacc'},
packages = find_packages(),
entry_points={
"console_scripts": [
"redcap2nacc = nacc.redcap2nacc:main"
]
},
install_requires=[
"cappy @ git+https://github.com/ctsit/cappy.git@1.2.1"
]
)
|
###############################################################################
# Copyright 2015-2019 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from setuptools import setup, find_packages
VERSION="0.5.0"
setup(
name="nacculator",
version=VERSION,
author="Taeber Rapczak",
author_email="taeber@ufl.edu",
maintainer="UF CTS-IT",
maintainer_email="ctsit@ctsi.ufl.edu",
url="https://github.com/ctsit/nacculator",
license="BSD 2-Clause",
description="CSV to NACC's UDS3 format converter",
keywords=["REDCap", "NACC", "UDS", "Clinical data"],
download_url="https://github.com/ctsit/nacculator/releases/tag/" + VERSION,
package_dir = {'nacc': 'nacc'},
packages = find_packages(),
entry_points={
"console_scripts": [
"redcap2nacc = nacc.redcap2nacc:main"
]
},
install_requires=[
"cappy @ git+https://github.com/ctsit/cappy.git@2.0.0"
]
)
|
Use python3 version of cappy
|
Use python3 version of cappy
|
Python
|
bsd-2-clause
|
ctsit/nacculator,ctsit/nacculator,ctsit/nacculator
|
###############################################################################
# Copyright 2015-2019 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from setuptools import setup, find_packages
VERSION="0.5.0"
setup(
name="nacculator",
version=VERSION,
author="Taeber Rapczak",
author_email="taeber@ufl.edu",
maintainer="UF CTS-IT",
maintainer_email="ctsit@ctsi.ufl.edu",
url="https://github.com/ctsit/nacculator",
license="BSD 2-Clause",
description="CSV to NACC's UDS3 format converter",
keywords=["REDCap", "NACC", "UDS", "Clinical data"],
download_url="https://github.com/ctsit/nacculator/releases/tag/" + VERSION,
package_dir = {'nacc': 'nacc'},
packages = find_packages(),
entry_points={
"console_scripts": [
"redcap2nacc = nacc.redcap2nacc:main"
]
},
install_requires=[
"cappy @ git+https://github.com/ctsit/cappy.git@1.2.1"
]
)
Use python3 version of cappy
|
###############################################################################
# Copyright 2015-2019 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from setuptools import setup, find_packages
VERSION="0.5.0"
setup(
name="nacculator",
version=VERSION,
author="Taeber Rapczak",
author_email="taeber@ufl.edu",
maintainer="UF CTS-IT",
maintainer_email="ctsit@ctsi.ufl.edu",
url="https://github.com/ctsit/nacculator",
license="BSD 2-Clause",
description="CSV to NACC's UDS3 format converter",
keywords=["REDCap", "NACC", "UDS", "Clinical data"],
download_url="https://github.com/ctsit/nacculator/releases/tag/" + VERSION,
package_dir = {'nacc': 'nacc'},
packages = find_packages(),
entry_points={
"console_scripts": [
"redcap2nacc = nacc.redcap2nacc:main"
]
},
install_requires=[
"cappy @ git+https://github.com/ctsit/cappy.git@2.0.0"
]
)
|
<commit_before>###############################################################################
# Copyright 2015-2019 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from setuptools import setup, find_packages
VERSION="0.5.0"
setup(
name="nacculator",
version=VERSION,
author="Taeber Rapczak",
author_email="taeber@ufl.edu",
maintainer="UF CTS-IT",
maintainer_email="ctsit@ctsi.ufl.edu",
url="https://github.com/ctsit/nacculator",
license="BSD 2-Clause",
description="CSV to NACC's UDS3 format converter",
keywords=["REDCap", "NACC", "UDS", "Clinical data"],
download_url="https://github.com/ctsit/nacculator/releases/tag/" + VERSION,
package_dir = {'nacc': 'nacc'},
packages = find_packages(),
entry_points={
"console_scripts": [
"redcap2nacc = nacc.redcap2nacc:main"
]
},
install_requires=[
"cappy @ git+https://github.com/ctsit/cappy.git@1.2.1"
]
)
<commit_msg>Use python3 version of cappy<commit_after>
|
###############################################################################
# Copyright 2015-2019 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from setuptools import setup, find_packages
VERSION="0.5.0"
setup(
name="nacculator",
version=VERSION,
author="Taeber Rapczak",
author_email="taeber@ufl.edu",
maintainer="UF CTS-IT",
maintainer_email="ctsit@ctsi.ufl.edu",
url="https://github.com/ctsit/nacculator",
license="BSD 2-Clause",
description="CSV to NACC's UDS3 format converter",
keywords=["REDCap", "NACC", "UDS", "Clinical data"],
download_url="https://github.com/ctsit/nacculator/releases/tag/" + VERSION,
package_dir = {'nacc': 'nacc'},
packages = find_packages(),
entry_points={
"console_scripts": [
"redcap2nacc = nacc.redcap2nacc:main"
]
},
install_requires=[
"cappy @ git+https://github.com/ctsit/cappy.git@2.0.0"
]
)
|
###############################################################################
# Copyright 2015-2019 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from setuptools import setup, find_packages
VERSION="0.5.0"
setup(
name="nacculator",
version=VERSION,
author="Taeber Rapczak",
author_email="taeber@ufl.edu",
maintainer="UF CTS-IT",
maintainer_email="ctsit@ctsi.ufl.edu",
url="https://github.com/ctsit/nacculator",
license="BSD 2-Clause",
description="CSV to NACC's UDS3 format converter",
keywords=["REDCap", "NACC", "UDS", "Clinical data"],
download_url="https://github.com/ctsit/nacculator/releases/tag/" + VERSION,
package_dir = {'nacc': 'nacc'},
packages = find_packages(),
entry_points={
"console_scripts": [
"redcap2nacc = nacc.redcap2nacc:main"
]
},
install_requires=[
"cappy @ git+https://github.com/ctsit/cappy.git@1.2.1"
]
)
Use python3 version of cappy
###############################################################################
# Copyright 2015-2019 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from setuptools import setup, find_packages
VERSION="0.5.0"
setup(
name="nacculator",
version=VERSION,
author="Taeber Rapczak",
author_email="taeber@ufl.edu",
maintainer="UF CTS-IT",
maintainer_email="ctsit@ctsi.ufl.edu",
url="https://github.com/ctsit/nacculator",
license="BSD 2-Clause",
description="CSV to NACC's UDS3 format converter",
keywords=["REDCap", "NACC", "UDS", "Clinical data"],
download_url="https://github.com/ctsit/nacculator/releases/tag/" + VERSION,
package_dir = {'nacc': 'nacc'},
packages = find_packages(),
entry_points={
"console_scripts": [
"redcap2nacc = nacc.redcap2nacc:main"
]
},
install_requires=[
"cappy @ git+https://github.com/ctsit/cappy.git@2.0.0"
]
)
|
<commit_before>###############################################################################
# Copyright 2015-2019 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from setuptools import setup, find_packages
VERSION="0.5.0"
setup(
name="nacculator",
version=VERSION,
author="Taeber Rapczak",
author_email="taeber@ufl.edu",
maintainer="UF CTS-IT",
maintainer_email="ctsit@ctsi.ufl.edu",
url="https://github.com/ctsit/nacculator",
license="BSD 2-Clause",
description="CSV to NACC's UDS3 format converter",
keywords=["REDCap", "NACC", "UDS", "Clinical data"],
download_url="https://github.com/ctsit/nacculator/releases/tag/" + VERSION,
package_dir = {'nacc': 'nacc'},
packages = find_packages(),
entry_points={
"console_scripts": [
"redcap2nacc = nacc.redcap2nacc:main"
]
},
install_requires=[
"cappy @ git+https://github.com/ctsit/cappy.git@1.2.1"
]
)
<commit_msg>Use python3 version of cappy<commit_after>###############################################################################
# Copyright 2015-2019 University of Florida. All rights reserved.
# This file is part of UF CTS-IT's NACCulator project.
# Use of this source code is governed by the license found in the LICENSE file.
###############################################################################
from setuptools import setup, find_packages
VERSION="0.5.0"
setup(
name="nacculator",
version=VERSION,
author="Taeber Rapczak",
author_email="taeber@ufl.edu",
maintainer="UF CTS-IT",
maintainer_email="ctsit@ctsi.ufl.edu",
url="https://github.com/ctsit/nacculator",
license="BSD 2-Clause",
description="CSV to NACC's UDS3 format converter",
keywords=["REDCap", "NACC", "UDS", "Clinical data"],
download_url="https://github.com/ctsit/nacculator/releases/tag/" + VERSION,
package_dir = {'nacc': 'nacc'},
packages = find_packages(),
entry_points={
"console_scripts": [
"redcap2nacc = nacc.redcap2nacc:main"
]
},
install_requires=[
"cappy @ git+https://github.com/ctsit/cappy.git@2.0.0"
]
)
|
f51589192d428b82acbebece6be73799e04a3f44
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='pystash',
version='0.0.14',
author='Alexander Davydov',
author_email='nyddle@gmail.com',
packages=[ 'pystash' ],
scripts=[ 'bin/stash' ],
url='http://pypi.python.org/pypi/pystash/',
license='LICENSE.txt',
description='Save your code snippets in the cloud.',
install_requires=[
"args>=0.1.0",
"clint>=0.3.3",
"requests>=2.2.0",
"wsgiref>=0.1.2",
"xerox"
],
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='pystash',
version='0.0.14',
author='Alexander Davydov',
author_email='nyddle@gmail.com',
packages=[ 'pystash' ],
scripts=[ 'bin/stash' ],
url='http://pypi.python.org/pypi/pystash/',
license='LICENSE.txt',
description='Save your code snippets in the cloud.',
install_requires=[
"args>=0.1.0",
"clint>=0.3.3",
"requests>=2.2.0",
"wsgiref>=0.1.2",
"xerox"
],
)
|
Fix for 'Unknown distribution option: install_requires' warning during install
|
Fix for 'Unknown distribution option: install_requires' warning during install
|
Python
|
mit
|
nyddle/pystash
|
from distutils.core import setup
setup(
name='pystash',
version='0.0.14',
author='Alexander Davydov',
author_email='nyddle@gmail.com',
packages=[ 'pystash' ],
scripts=[ 'bin/stash' ],
url='http://pypi.python.org/pypi/pystash/',
license='LICENSE.txt',
description='Save your code snippets in the cloud.',
install_requires=[
"args>=0.1.0",
"clint>=0.3.3",
"requests>=2.2.0",
"wsgiref>=0.1.2",
"xerox"
],
)
Fix for 'Unknown distribution option: install_requires' warning during install
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='pystash',
version='0.0.14',
author='Alexander Davydov',
author_email='nyddle@gmail.com',
packages=[ 'pystash' ],
scripts=[ 'bin/stash' ],
url='http://pypi.python.org/pypi/pystash/',
license='LICENSE.txt',
description='Save your code snippets in the cloud.',
install_requires=[
"args>=0.1.0",
"clint>=0.3.3",
"requests>=2.2.0",
"wsgiref>=0.1.2",
"xerox"
],
)
|
<commit_before>from distutils.core import setup
setup(
name='pystash',
version='0.0.14',
author='Alexander Davydov',
author_email='nyddle@gmail.com',
packages=[ 'pystash' ],
scripts=[ 'bin/stash' ],
url='http://pypi.python.org/pypi/pystash/',
license='LICENSE.txt',
description='Save your code snippets in the cloud.',
install_requires=[
"args>=0.1.0",
"clint>=0.3.3",
"requests>=2.2.0",
"wsgiref>=0.1.2",
"xerox"
],
)
<commit_msg>Fix for 'Unknown distribution option: install_requires' warning during install<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='pystash',
version='0.0.14',
author='Alexander Davydov',
author_email='nyddle@gmail.com',
packages=[ 'pystash' ],
scripts=[ 'bin/stash' ],
url='http://pypi.python.org/pypi/pystash/',
license='LICENSE.txt',
description='Save your code snippets in the cloud.',
install_requires=[
"args>=0.1.0",
"clint>=0.3.3",
"requests>=2.2.0",
"wsgiref>=0.1.2",
"xerox"
],
)
|
from distutils.core import setup
setup(
name='pystash',
version='0.0.14',
author='Alexander Davydov',
author_email='nyddle@gmail.com',
packages=[ 'pystash' ],
scripts=[ 'bin/stash' ],
url='http://pypi.python.org/pypi/pystash/',
license='LICENSE.txt',
description='Save your code snippets in the cloud.',
install_requires=[
"args>=0.1.0",
"clint>=0.3.3",
"requests>=2.2.0",
"wsgiref>=0.1.2",
"xerox"
],
)
Fix for 'Unknown distribution option: install_requires' warning during install
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='pystash',
version='0.0.14',
author='Alexander Davydov',
author_email='nyddle@gmail.com',
packages=[ 'pystash' ],
scripts=[ 'bin/stash' ],
url='http://pypi.python.org/pypi/pystash/',
license='LICENSE.txt',
description='Save your code snippets in the cloud.',
install_requires=[
"args>=0.1.0",
"clint>=0.3.3",
"requests>=2.2.0",
"wsgiref>=0.1.2",
"xerox"
],
)
|
<commit_before>from distutils.core import setup
setup(
name='pystash',
version='0.0.14',
author='Alexander Davydov',
author_email='nyddle@gmail.com',
packages=[ 'pystash' ],
scripts=[ 'bin/stash' ],
url='http://pypi.python.org/pypi/pystash/',
license='LICENSE.txt',
description='Save your code snippets in the cloud.',
install_requires=[
"args>=0.1.0",
"clint>=0.3.3",
"requests>=2.2.0",
"wsgiref>=0.1.2",
"xerox"
],
)
<commit_msg>Fix for 'Unknown distribution option: install_requires' warning during install<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='pystash',
version='0.0.14',
author='Alexander Davydov',
author_email='nyddle@gmail.com',
packages=[ 'pystash' ],
scripts=[ 'bin/stash' ],
url='http://pypi.python.org/pypi/pystash/',
license='LICENSE.txt',
description='Save your code snippets in the cloud.',
install_requires=[
"args>=0.1.0",
"clint>=0.3.3",
"requests>=2.2.0",
"wsgiref>=0.1.2",
"xerox"
],
)
|
5a5a83abb5265dd0abc3c6306f65930c4ce012f2
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="globus-cli",
version="0.1.0",
packages=find_packages(),
install_requires=['globus-sdk-python'],
# for now, install directly from GitHub
# TODO: once this is on pypi, install from there
dependency_links=[
('https://github.com/globusonline/globus-sdk-python/'
'archive/master.zip#egg=globus-sdk-python-0.1')
],
entry_points={
'console_scripts': ['globus = globus_cli:run_command']
},
# descriptive info, non-critical
description="Globus CLI",
long_description=open("README.md").read(),
author="Stephen Rosen",
author_email="sirosen@globus.org",
url="https://github.com/globusonline/globus-cli",
keywords=["globus", "cli", "command line"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX",
"Programming Language :: Python",
],
)
|
from setuptools import setup, find_packages
setup(
name="globus-cli",
version="0.1.0",
packages=find_packages(),
install_requires=['globus-sdk'],
# for now, install directly from GitHub
# TODO: once this is on pypi, install from there
dependency_links=[
('https://github.com/globusonline/globus-sdk-python/'
'archive/master.zip#egg=globus-sdk-0.1')
],
entry_points={
'console_scripts': ['globus = globus_cli:run_command']
},
# descriptive info, non-critical
description="Globus CLI",
long_description=open("README.md").read(),
author="Stephen Rosen",
author_email="sirosen@globus.org",
url="https://github.com/globusonline/globus-cli",
keywords=["globus", "cli", "command line"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX",
"Programming Language :: Python",
],
)
|
Fix globus-sdk python package name
|
Fix globus-sdk python package name
To match recent change in SDK repo
|
Python
|
apache-2.0
|
globus/globus-cli,globus/globus-cli
|
from setuptools import setup, find_packages
setup(
name="globus-cli",
version="0.1.0",
packages=find_packages(),
install_requires=['globus-sdk-python'],
# for now, install directly from GitHub
# TODO: once this is on pypi, install from there
dependency_links=[
('https://github.com/globusonline/globus-sdk-python/'
'archive/master.zip#egg=globus-sdk-python-0.1')
],
entry_points={
'console_scripts': ['globus = globus_cli:run_command']
},
# descriptive info, non-critical
description="Globus CLI",
long_description=open("README.md").read(),
author="Stephen Rosen",
author_email="sirosen@globus.org",
url="https://github.com/globusonline/globus-cli",
keywords=["globus", "cli", "command line"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX",
"Programming Language :: Python",
],
)
Fix globus-sdk python package name
To match recent change in SDK repo
|
from setuptools import setup, find_packages
setup(
name="globus-cli",
version="0.1.0",
packages=find_packages(),
install_requires=['globus-sdk'],
# for now, install directly from GitHub
# TODO: once this is on pypi, install from there
dependency_links=[
('https://github.com/globusonline/globus-sdk-python/'
'archive/master.zip#egg=globus-sdk-0.1')
],
entry_points={
'console_scripts': ['globus = globus_cli:run_command']
},
# descriptive info, non-critical
description="Globus CLI",
long_description=open("README.md").read(),
author="Stephen Rosen",
author_email="sirosen@globus.org",
url="https://github.com/globusonline/globus-cli",
keywords=["globus", "cli", "command line"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX",
"Programming Language :: Python",
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="globus-cli",
version="0.1.0",
packages=find_packages(),
install_requires=['globus-sdk-python'],
# for now, install directly from GitHub
# TODO: once this is on pypi, install from there
dependency_links=[
('https://github.com/globusonline/globus-sdk-python/'
'archive/master.zip#egg=globus-sdk-python-0.1')
],
entry_points={
'console_scripts': ['globus = globus_cli:run_command']
},
# descriptive info, non-critical
description="Globus CLI",
long_description=open("README.md").read(),
author="Stephen Rosen",
author_email="sirosen@globus.org",
url="https://github.com/globusonline/globus-cli",
keywords=["globus", "cli", "command line"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX",
"Programming Language :: Python",
],
)
<commit_msg>Fix globus-sdk python package name
To match recent change in SDK repo<commit_after>
|
from setuptools import setup, find_packages
setup(
name="globus-cli",
version="0.1.0",
packages=find_packages(),
install_requires=['globus-sdk'],
# for now, install directly from GitHub
# TODO: once this is on pypi, install from there
dependency_links=[
('https://github.com/globusonline/globus-sdk-python/'
'archive/master.zip#egg=globus-sdk-0.1')
],
entry_points={
'console_scripts': ['globus = globus_cli:run_command']
},
# descriptive info, non-critical
description="Globus CLI",
long_description=open("README.md").read(),
author="Stephen Rosen",
author_email="sirosen@globus.org",
url="https://github.com/globusonline/globus-cli",
keywords=["globus", "cli", "command line"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX",
"Programming Language :: Python",
],
)
|
from setuptools import setup, find_packages
setup(
name="globus-cli",
version="0.1.0",
packages=find_packages(),
install_requires=['globus-sdk-python'],
# for now, install directly from GitHub
# TODO: once this is on pypi, install from there
dependency_links=[
('https://github.com/globusonline/globus-sdk-python/'
'archive/master.zip#egg=globus-sdk-python-0.1')
],
entry_points={
'console_scripts': ['globus = globus_cli:run_command']
},
# descriptive info, non-critical
description="Globus CLI",
long_description=open("README.md").read(),
author="Stephen Rosen",
author_email="sirosen@globus.org",
url="https://github.com/globusonline/globus-cli",
keywords=["globus", "cli", "command line"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX",
"Programming Language :: Python",
],
)
Fix globus-sdk python package name
To match recent change in SDK repo
from setuptools import setup, find_packages
setup(
name="globus-cli",
version="0.1.0",
packages=find_packages(),
install_requires=['globus-sdk'],
# for now, install directly from GitHub
# TODO: once this is on pypi, install from there
dependency_links=[
('https://github.com/globusonline/globus-sdk-python/'
'archive/master.zip#egg=globus-sdk-0.1')
],
entry_points={
'console_scripts': ['globus = globus_cli:run_command']
},
# descriptive info, non-critical
description="Globus CLI",
long_description=open("README.md").read(),
author="Stephen Rosen",
author_email="sirosen@globus.org",
url="https://github.com/globusonline/globus-cli",
keywords=["globus", "cli", "command line"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX",
"Programming Language :: Python",
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="globus-cli",
version="0.1.0",
packages=find_packages(),
install_requires=['globus-sdk-python'],
# for now, install directly from GitHub
# TODO: once this is on pypi, install from there
dependency_links=[
('https://github.com/globusonline/globus-sdk-python/'
'archive/master.zip#egg=globus-sdk-python-0.1')
],
entry_points={
'console_scripts': ['globus = globus_cli:run_command']
},
# descriptive info, non-critical
description="Globus CLI",
long_description=open("README.md").read(),
author="Stephen Rosen",
author_email="sirosen@globus.org",
url="https://github.com/globusonline/globus-cli",
keywords=["globus", "cli", "command line"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX",
"Programming Language :: Python",
],
)
<commit_msg>Fix globus-sdk python package name
To match recent change in SDK repo<commit_after>from setuptools import setup, find_packages
setup(
name="globus-cli",
version="0.1.0",
packages=find_packages(),
install_requires=['globus-sdk'],
# for now, install directly from GitHub
# TODO: once this is on pypi, install from there
dependency_links=[
('https://github.com/globusonline/globus-sdk-python/'
'archive/master.zip#egg=globus-sdk-0.1')
],
entry_points={
'console_scripts': ['globus = globus_cli:run_command']
},
# descriptive info, non-critical
description="Globus CLI",
long_description=open("README.md").read(),
author="Stephen Rosen",
author_email="sirosen@globus.org",
url="https://github.com/globusonline/globus-cli",
keywords=["globus", "cli", "command line"],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: POSIX",
"Programming Language :: Python",
],
)
|
226f2a5674c9d1d16801cfe7b8c5ac636e849b4a
|
setup.py
|
setup.py
|
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
'pytest-runner',
],
tests_require=[
"pytest",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="lukas.schwab@gmail.com",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
|
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
],
tests_require=[
"pytest",
"numpy",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="lukas.schwab@gmail.com",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
|
Remove pytest-runner from install requirements, add numpy as test requirement
|
Remove pytest-runner from install requirements, add numpy as test requirement
|
Python
|
mit
|
lukasschwab/arxiv.py
|
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
'pytest-runner',
],
tests_require=[
"pytest",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="lukas.schwab@gmail.com",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
Remove pytest-runner from install requirements, add numpy as test requirement
|
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
],
tests_require=[
"pytest",
"numpy",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="lukas.schwab@gmail.com",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
|
<commit_before>from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
'pytest-runner',
],
tests_require=[
"pytest",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="lukas.schwab@gmail.com",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
<commit_msg>Remove pytest-runner from install requirements, add numpy as test requirement<commit_after>
|
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
],
tests_require=[
"pytest",
"numpy",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="lukas.schwab@gmail.com",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
|
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
'pytest-runner',
],
tests_require=[
"pytest",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="lukas.schwab@gmail.com",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
Remove pytest-runner from install requirements, add numpy as test requirement
from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
],
tests_require=[
"pytest",
"numpy",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="lukas.schwab@gmail.com",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
|
<commit_before>from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
'pytest-runner',
],
tests_require=[
"pytest",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="lukas.schwab@gmail.com",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
<commit_msg>Remove pytest-runner from install requirements, add numpy as test requirement<commit_after>from setuptools import setup
version = "0.5.1"
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name="arxiv",
version=version,
packages=["arxiv"],
# dependencies
install_requires=[
'feedparser',
'requests',
],
tests_require=[
"pytest",
"numpy",
],
# metadata for upload to PyPI
author="Lukas Schwab",
author_email="lukas.schwab@gmail.com",
description="Python wrapper for the arXiv API: http://arxiv.org/help/api/",
long_description=long_description,
long_description_content_type="text/markdown",
license="MIT",
keywords="arxiv api wrapper academic journals papers",
url="https://github.com/lukasschwab/arxiv.py",
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
)
|
fe2ce1c782730690f92651439abe33b6252821b8
|
setup.py
|
setup.py
|
from setuptools import setup
REQUIRES = [
'markdown',
'mdx_outline',
]
SOURCES = []
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name="mdx_attr_cols",
version="0.1.1",
url='http://github.com/CTPUG/mdx_attr_cols',
license='MIT',
description="A bootstrap 3 row and columns extension for Markdown",
long_description=long_description,
author='CTPUG',
author_email='ctpug@googlegroups.com',
py_modules=[
'mdx_attr_cols',
],
install_requires=REQUIRES,
dependency_links=SOURCES,
setup_requires=[
# Add setuptools-git, so we get correct behaviour for
# include_package_data
'setuptools_git >= 1.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
from setuptools import setup
REQUIRES = [
'markdown',
'mdx_outline',
]
SOURCES = []
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name="mdx-attr-cols",
version="0.1.1",
url='http://github.com/CTPUG/mdx_attr_cols',
license='MIT',
description="A bootstrap 3 row and columns extension for Markdown",
long_description=long_description,
author='CTPUG',
author_email='ctpug@googlegroups.com',
py_modules=[
'mdx_attr_cols',
],
install_requires=REQUIRES,
dependency_links=SOURCES,
setup_requires=[
# Add setuptools-git, so we get correct behaviour for
# include_package_data
'setuptools_git >= 1.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
Rename package to use dashes.
|
Rename package to use dashes.
|
Python
|
isc
|
CTPUG/mdx_attr_cols
|
from setuptools import setup
REQUIRES = [
'markdown',
'mdx_outline',
]
SOURCES = []
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name="mdx_attr_cols",
version="0.1.1",
url='http://github.com/CTPUG/mdx_attr_cols',
license='MIT',
description="A bootstrap 3 row and columns extension for Markdown",
long_description=long_description,
author='CTPUG',
author_email='ctpug@googlegroups.com',
py_modules=[
'mdx_attr_cols',
],
install_requires=REQUIRES,
dependency_links=SOURCES,
setup_requires=[
# Add setuptools-git, so we get correct behaviour for
# include_package_data
'setuptools_git >= 1.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
Rename package to use dashes.
|
from setuptools import setup
REQUIRES = [
'markdown',
'mdx_outline',
]
SOURCES = []
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name="mdx-attr-cols",
version="0.1.1",
url='http://github.com/CTPUG/mdx_attr_cols',
license='MIT',
description="A bootstrap 3 row and columns extension for Markdown",
long_description=long_description,
author='CTPUG',
author_email='ctpug@googlegroups.com',
py_modules=[
'mdx_attr_cols',
],
install_requires=REQUIRES,
dependency_links=SOURCES,
setup_requires=[
# Add setuptools-git, so we get correct behaviour for
# include_package_data
'setuptools_git >= 1.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
<commit_before>from setuptools import setup
REQUIRES = [
'markdown',
'mdx_outline',
]
SOURCES = []
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name="mdx_attr_cols",
version="0.1.1",
url='http://github.com/CTPUG/mdx_attr_cols',
license='MIT',
description="A bootstrap 3 row and columns extension for Markdown",
long_description=long_description,
author='CTPUG',
author_email='ctpug@googlegroups.com',
py_modules=[
'mdx_attr_cols',
],
install_requires=REQUIRES,
dependency_links=SOURCES,
setup_requires=[
# Add setuptools-git, so we get correct behaviour for
# include_package_data
'setuptools_git >= 1.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
<commit_msg>Rename package to use dashes.<commit_after>
|
from setuptools import setup
REQUIRES = [
'markdown',
'mdx_outline',
]
SOURCES = []
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name="mdx-attr-cols",
version="0.1.1",
url='http://github.com/CTPUG/mdx_attr_cols',
license='MIT',
description="A bootstrap 3 row and columns extension for Markdown",
long_description=long_description,
author='CTPUG',
author_email='ctpug@googlegroups.com',
py_modules=[
'mdx_attr_cols',
],
install_requires=REQUIRES,
dependency_links=SOURCES,
setup_requires=[
# Add setuptools-git, so we get correct behaviour for
# include_package_data
'setuptools_git >= 1.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
from setuptools import setup
REQUIRES = [
'markdown',
'mdx_outline',
]
SOURCES = []
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name="mdx_attr_cols",
version="0.1.1",
url='http://github.com/CTPUG/mdx_attr_cols',
license='MIT',
description="A bootstrap 3 row and columns extension for Markdown",
long_description=long_description,
author='CTPUG',
author_email='ctpug@googlegroups.com',
py_modules=[
'mdx_attr_cols',
],
install_requires=REQUIRES,
dependency_links=SOURCES,
setup_requires=[
# Add setuptools-git, so we get correct behaviour for
# include_package_data
'setuptools_git >= 1.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
Rename package to use dashes.
from setuptools import setup
REQUIRES = [
'markdown',
'mdx_outline',
]
SOURCES = []
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name="mdx-attr-cols",
version="0.1.1",
url='http://github.com/CTPUG/mdx_attr_cols',
license='MIT',
description="A bootstrap 3 row and columns extension for Markdown",
long_description=long_description,
author='CTPUG',
author_email='ctpug@googlegroups.com',
py_modules=[
'mdx_attr_cols',
],
install_requires=REQUIRES,
dependency_links=SOURCES,
setup_requires=[
# Add setuptools-git, so we get correct behaviour for
# include_package_data
'setuptools_git >= 1.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
<commit_before>from setuptools import setup
REQUIRES = [
'markdown',
'mdx_outline',
]
SOURCES = []
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name="mdx_attr_cols",
version="0.1.1",
url='http://github.com/CTPUG/mdx_attr_cols',
license='MIT',
description="A bootstrap 3 row and columns extension for Markdown",
long_description=long_description,
author='CTPUG',
author_email='ctpug@googlegroups.com',
py_modules=[
'mdx_attr_cols',
],
install_requires=REQUIRES,
dependency_links=SOURCES,
setup_requires=[
# Add setuptools-git, so we get correct behaviour for
# include_package_data
'setuptools_git >= 1.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
<commit_msg>Rename package to use dashes.<commit_after>from setuptools import setup
REQUIRES = [
'markdown',
'mdx_outline',
]
SOURCES = []
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name="mdx-attr-cols",
version="0.1.1",
url='http://github.com/CTPUG/mdx_attr_cols',
license='MIT',
description="A bootstrap 3 row and columns extension for Markdown",
long_description=long_description,
author='CTPUG',
author_email='ctpug@googlegroups.com',
py_modules=[
'mdx_attr_cols',
],
install_requires=REQUIRES,
dependency_links=SOURCES,
setup_requires=[
# Add setuptools-git, so we get correct behaviour for
# include_package_data
'setuptools_git >= 1.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
],
)
|
f964c435efce30a3ca3cb10185666e8e8af7a0db
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
with open('README.rst') as fd:
long_description = fd.read()
setup(name='Flask-ESClient',
version='0.1.1',
description='Flask extension for ESClient (elasticsearch client)',
long_description=long_description,
author='Baiju Muthukadan',
author_email='baiju.m.mail@gmail.com',
url='https://github.com/baijum/flask-esclient',
py_modules=['flask_esclient',
'test_flask_esclient'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Flask',
'ESClient',
],
test_suite='test_flask_esclient.suite',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD 2-Clause License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
from setuptools import setup, find_packages
with open('README.rst') as fd:
long_description = fd.read()
setup(name='Flask-ESClient',
version='0.1.1',
description='Flask extension for ESClient (elasticsearch client)',
long_description=long_description,
author='Baiju Muthukadan',
author_email='baiju.m.mail@gmail.com',
url='https://github.com/baijum/flask-esclient',
py_modules=['flask_esclient',
'test_flask_esclient'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Flask',
'ESClient',
],
test_suite='test_flask_esclient.suite',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
Correct trove classifier for license
|
Correct trove classifier for license
|
Python
|
bsd-2-clause
|
baijum/flask-esclient
|
from setuptools import setup, find_packages
with open('README.rst') as fd:
long_description = fd.read()
setup(name='Flask-ESClient',
version='0.1.1',
description='Flask extension for ESClient (elasticsearch client)',
long_description=long_description,
author='Baiju Muthukadan',
author_email='baiju.m.mail@gmail.com',
url='https://github.com/baijum/flask-esclient',
py_modules=['flask_esclient',
'test_flask_esclient'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Flask',
'ESClient',
],
test_suite='test_flask_esclient.suite',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD 2-Clause License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Correct trove classifier for license
|
from setuptools import setup, find_packages
with open('README.rst') as fd:
long_description = fd.read()
setup(name='Flask-ESClient',
version='0.1.1',
description='Flask extension for ESClient (elasticsearch client)',
long_description=long_description,
author='Baiju Muthukadan',
author_email='baiju.m.mail@gmail.com',
url='https://github.com/baijum/flask-esclient',
py_modules=['flask_esclient',
'test_flask_esclient'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Flask',
'ESClient',
],
test_suite='test_flask_esclient.suite',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>from setuptools import setup, find_packages
with open('README.rst') as fd:
long_description = fd.read()
setup(name='Flask-ESClient',
version='0.1.1',
description='Flask extension for ESClient (elasticsearch client)',
long_description=long_description,
author='Baiju Muthukadan',
author_email='baiju.m.mail@gmail.com',
url='https://github.com/baijum/flask-esclient',
py_modules=['flask_esclient',
'test_flask_esclient'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Flask',
'ESClient',
],
test_suite='test_flask_esclient.suite',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD 2-Clause License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Correct trove classifier for license<commit_after>
|
from setuptools import setup, find_packages
with open('README.rst') as fd:
long_description = fd.read()
setup(name='Flask-ESClient',
version='0.1.1',
description='Flask extension for ESClient (elasticsearch client)',
long_description=long_description,
author='Baiju Muthukadan',
author_email='baiju.m.mail@gmail.com',
url='https://github.com/baijum/flask-esclient',
py_modules=['flask_esclient',
'test_flask_esclient'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Flask',
'ESClient',
],
test_suite='test_flask_esclient.suite',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
from setuptools import setup, find_packages
with open('README.rst') as fd:
long_description = fd.read()
setup(name='Flask-ESClient',
version='0.1.1',
description='Flask extension for ESClient (elasticsearch client)',
long_description=long_description,
author='Baiju Muthukadan',
author_email='baiju.m.mail@gmail.com',
url='https://github.com/baijum/flask-esclient',
py_modules=['flask_esclient',
'test_flask_esclient'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Flask',
'ESClient',
],
test_suite='test_flask_esclient.suite',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD 2-Clause License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Correct trove classifier for license
from setuptools import setup, find_packages
with open('README.rst') as fd:
long_description = fd.read()
setup(name='Flask-ESClient',
version='0.1.1',
description='Flask extension for ESClient (elasticsearch client)',
long_description=long_description,
author='Baiju Muthukadan',
author_email='baiju.m.mail@gmail.com',
url='https://github.com/baijum/flask-esclient',
py_modules=['flask_esclient',
'test_flask_esclient'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Flask',
'ESClient',
],
test_suite='test_flask_esclient.suite',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>from setuptools import setup, find_packages
with open('README.rst') as fd:
long_description = fd.read()
setup(name='Flask-ESClient',
version='0.1.1',
description='Flask extension for ESClient (elasticsearch client)',
long_description=long_description,
author='Baiju Muthukadan',
author_email='baiju.m.mail@gmail.com',
url='https://github.com/baijum/flask-esclient',
py_modules=['flask_esclient',
'test_flask_esclient'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Flask',
'ESClient',
],
test_suite='test_flask_esclient.suite',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD 2-Clause License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Correct trove classifier for license<commit_after>from setuptools import setup, find_packages
with open('README.rst') as fd:
long_description = fd.read()
setup(name='Flask-ESClient',
version='0.1.1',
description='Flask extension for ESClient (elasticsearch client)',
long_description=long_description,
author='Baiju Muthukadan',
author_email='baiju.m.mail@gmail.com',
url='https://github.com/baijum/flask-esclient',
py_modules=['flask_esclient',
'test_flask_esclient'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'Flask',
'ESClient',
],
test_suite='test_flask_esclient.suite',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
3c9a062ebb7745fbdefcf836165ef5cd85825417
|
setup.py
|
setup.py
|
from setuptools import setup, Extension
import numpy as np
import os
extension_name = '_pyaccess'
extension_version = '.1'
include_dirs = [
'ann_1.1.2/include',
'sparsehash-2.0.2/src',
np.get_include(),
'.'
]
library_dirs = [
'ann_1.1.2/lib',
'contraction_hierarchies'
]
packages = ['pyaccess']
libraries = [ 'ANN', 'ch', 'gomp']
source_files = [
'pyaccess/accessibility.cpp',
'pyaccess/graphalg.cpp',
'pyaccess/nearestneighbor.cpp',
'pyaccess/pyaccesswrap.cpp'
]
extra_compile_args = [
'-shared',
'-DMACOSX',
'-DLINUX',
'-w',
'-std=gnu++0x',
'-O3',
'-fopenmp',
'-fpic',
'-g',
'-Wno-deprecated',
# '-ferror-limit=1'
]
py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess']
setup(
packages=packages,
py_modules=py_modules,
name=extension_name,
version=extension_version,
ext_modules=[
Extension(
extension_name,
source_files,
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args
)
]
)
|
from setuptools import setup, Extension
import numpy as np
import os
extension_name = '_pyaccess'
extension_version = '.1'
include_dirs = [
'ann_1.1.2/include',
'sparsehash-2.0.2/src',
np.get_include(),
'.'
]
library_dirs = [
'ann_1.1.2/lib',
'contraction_hierarchies'
]
packages = ['pyaccess']
libraries = [ 'ANN', 'ch', 'gomp']
source_files = [
'pyaccess/accessibility.cpp',
'pyaccess/graphalg.cpp',
'pyaccess/nearestneighbor.cpp',
'pyaccess/pyaccesswrap.cpp'
]
extra_compile_args = [
'-shared',
'-DMACOSX',
'-DLINUX',
'-w',
'-std=gnu++0x',
'-O3',
'-fopenmp',
'-fpic',
'-g',
'-Wno-deprecated',
]
py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess']
setup(
packages=packages,
py_modules=py_modules,
name='pyaccess',
version=extension_version,
ext_modules=[
Extension(
extension_name,
source_files,
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args
)
]
)
|
Use pyaccess as the package name.
|
Use pyaccess as the package name.
|
Python
|
agpl-3.0
|
UDST/pandana,SANDAG/pandana,UDST/pandana,rafapereirabr/pandana,SANDAG/pandana,UDST/pandana,waddell/pandana,waddell/pandana,waddell/pandana,synthicity/pandana,rafapereirabr/pandana,osPlanning/pandana,waddell/pandana,osPlanning/pandana,osPlanning/pandana,osPlanning/pandana,rafapereirabr/pandana,synthicity/pandana,rafapereirabr/pandana,SANDAG/pandana,UDST/pandana,synthicity/pandana,SANDAG/pandana,synthicity/pandana
|
from setuptools import setup, Extension
import numpy as np
import os
extension_name = '_pyaccess'
extension_version = '.1'
include_dirs = [
'ann_1.1.2/include',
'sparsehash-2.0.2/src',
np.get_include(),
'.'
]
library_dirs = [
'ann_1.1.2/lib',
'contraction_hierarchies'
]
packages = ['pyaccess']
libraries = [ 'ANN', 'ch', 'gomp']
source_files = [
'pyaccess/accessibility.cpp',
'pyaccess/graphalg.cpp',
'pyaccess/nearestneighbor.cpp',
'pyaccess/pyaccesswrap.cpp'
]
extra_compile_args = [
'-shared',
'-DMACOSX',
'-DLINUX',
'-w',
'-std=gnu++0x',
'-O3',
'-fopenmp',
'-fpic',
'-g',
'-Wno-deprecated',
# '-ferror-limit=1'
]
py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess']
setup(
packages=packages,
py_modules=py_modules,
name=extension_name,
version=extension_version,
ext_modules=[
Extension(
extension_name,
source_files,
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args
)
]
)
Use pyaccess as the package name.
|
from setuptools import setup, Extension
import numpy as np
import os
extension_name = '_pyaccess'
extension_version = '.1'
include_dirs = [
'ann_1.1.2/include',
'sparsehash-2.0.2/src',
np.get_include(),
'.'
]
library_dirs = [
'ann_1.1.2/lib',
'contraction_hierarchies'
]
packages = ['pyaccess']
libraries = [ 'ANN', 'ch', 'gomp']
source_files = [
'pyaccess/accessibility.cpp',
'pyaccess/graphalg.cpp',
'pyaccess/nearestneighbor.cpp',
'pyaccess/pyaccesswrap.cpp'
]
extra_compile_args = [
'-shared',
'-DMACOSX',
'-DLINUX',
'-w',
'-std=gnu++0x',
'-O3',
'-fopenmp',
'-fpic',
'-g',
'-Wno-deprecated',
]
py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess']
setup(
packages=packages,
py_modules=py_modules,
name='pyaccess',
version=extension_version,
ext_modules=[
Extension(
extension_name,
source_files,
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args
)
]
)
|
<commit_before>from setuptools import setup, Extension
import numpy as np
import os
extension_name = '_pyaccess'
extension_version = '.1'
include_dirs = [
'ann_1.1.2/include',
'sparsehash-2.0.2/src',
np.get_include(),
'.'
]
library_dirs = [
'ann_1.1.2/lib',
'contraction_hierarchies'
]
packages = ['pyaccess']
libraries = [ 'ANN', 'ch', 'gomp']
source_files = [
'pyaccess/accessibility.cpp',
'pyaccess/graphalg.cpp',
'pyaccess/nearestneighbor.cpp',
'pyaccess/pyaccesswrap.cpp'
]
extra_compile_args = [
'-shared',
'-DMACOSX',
'-DLINUX',
'-w',
'-std=gnu++0x',
'-O3',
'-fopenmp',
'-fpic',
'-g',
'-Wno-deprecated',
# '-ferror-limit=1'
]
py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess']
setup(
packages=packages,
py_modules=py_modules,
name=extension_name,
version=extension_version,
ext_modules=[
Extension(
extension_name,
source_files,
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args
)
]
)
<commit_msg>Use pyaccess as the package name.<commit_after>
|
from setuptools import setup, Extension
import numpy as np
import os
extension_name = '_pyaccess'
extension_version = '.1'
include_dirs = [
'ann_1.1.2/include',
'sparsehash-2.0.2/src',
np.get_include(),
'.'
]
library_dirs = [
'ann_1.1.2/lib',
'contraction_hierarchies'
]
packages = ['pyaccess']
libraries = [ 'ANN', 'ch', 'gomp']
source_files = [
'pyaccess/accessibility.cpp',
'pyaccess/graphalg.cpp',
'pyaccess/nearestneighbor.cpp',
'pyaccess/pyaccesswrap.cpp'
]
extra_compile_args = [
'-shared',
'-DMACOSX',
'-DLINUX',
'-w',
'-std=gnu++0x',
'-O3',
'-fopenmp',
'-fpic',
'-g',
'-Wno-deprecated',
]
py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess']
setup(
packages=packages,
py_modules=py_modules,
name='pyaccess',
version=extension_version,
ext_modules=[
Extension(
extension_name,
source_files,
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args
)
]
)
|
from setuptools import setup, Extension
import numpy as np
import os
extension_name = '_pyaccess'
extension_version = '.1'
include_dirs = [
'ann_1.1.2/include',
'sparsehash-2.0.2/src',
np.get_include(),
'.'
]
library_dirs = [
'ann_1.1.2/lib',
'contraction_hierarchies'
]
packages = ['pyaccess']
libraries = [ 'ANN', 'ch', 'gomp']
source_files = [
'pyaccess/accessibility.cpp',
'pyaccess/graphalg.cpp',
'pyaccess/nearestneighbor.cpp',
'pyaccess/pyaccesswrap.cpp'
]
extra_compile_args = [
'-shared',
'-DMACOSX',
'-DLINUX',
'-w',
'-std=gnu++0x',
'-O3',
'-fopenmp',
'-fpic',
'-g',
'-Wno-deprecated',
# '-ferror-limit=1'
]
py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess']
setup(
packages=packages,
py_modules=py_modules,
name=extension_name,
version=extension_version,
ext_modules=[
Extension(
extension_name,
source_files,
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args
)
]
)
Use pyaccess as the package name.
from setuptools import setup, Extension
import numpy as np
import os
extension_name = '_pyaccess'
extension_version = '.1'
include_dirs = [
'ann_1.1.2/include',
'sparsehash-2.0.2/src',
np.get_include(),
'.'
]
library_dirs = [
'ann_1.1.2/lib',
'contraction_hierarchies'
]
packages = ['pyaccess']
libraries = [ 'ANN', 'ch', 'gomp']
source_files = [
'pyaccess/accessibility.cpp',
'pyaccess/graphalg.cpp',
'pyaccess/nearestneighbor.cpp',
'pyaccess/pyaccesswrap.cpp'
]
extra_compile_args = [
'-shared',
'-DMACOSX',
'-DLINUX',
'-w',
'-std=gnu++0x',
'-O3',
'-fopenmp',
'-fpic',
'-g',
'-Wno-deprecated',
]
py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess']
setup(
packages=packages,
py_modules=py_modules,
name='pyaccess',
version=extension_version,
ext_modules=[
Extension(
extension_name,
source_files,
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args
)
]
)
|
<commit_before>from setuptools import setup, Extension
import numpy as np
import os
extension_name = '_pyaccess'
extension_version = '.1'
include_dirs = [
'ann_1.1.2/include',
'sparsehash-2.0.2/src',
np.get_include(),
'.'
]
library_dirs = [
'ann_1.1.2/lib',
'contraction_hierarchies'
]
packages = ['pyaccess']
libraries = [ 'ANN', 'ch', 'gomp']
source_files = [
'pyaccess/accessibility.cpp',
'pyaccess/graphalg.cpp',
'pyaccess/nearestneighbor.cpp',
'pyaccess/pyaccesswrap.cpp'
]
extra_compile_args = [
'-shared',
'-DMACOSX',
'-DLINUX',
'-w',
'-std=gnu++0x',
'-O3',
'-fopenmp',
'-fpic',
'-g',
'-Wno-deprecated',
# '-ferror-limit=1'
]
py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess']
setup(
packages=packages,
py_modules=py_modules,
name=extension_name,
version=extension_version,
ext_modules=[
Extension(
extension_name,
source_files,
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args
)
]
)
<commit_msg>Use pyaccess as the package name.<commit_after>from setuptools import setup, Extension
import numpy as np
import os
extension_name = '_pyaccess'
extension_version = '.1'
include_dirs = [
'ann_1.1.2/include',
'sparsehash-2.0.2/src',
np.get_include(),
'.'
]
library_dirs = [
'ann_1.1.2/lib',
'contraction_hierarchies'
]
packages = ['pyaccess']
libraries = [ 'ANN', 'ch', 'gomp']
source_files = [
'pyaccess/accessibility.cpp',
'pyaccess/graphalg.cpp',
'pyaccess/nearestneighbor.cpp',
'pyaccess/pyaccesswrap.cpp'
]
extra_compile_args = [
'-shared',
'-DMACOSX',
'-DLINUX',
'-w',
'-std=gnu++0x',
'-O3',
'-fopenmp',
'-fpic',
'-g',
'-Wno-deprecated',
]
py_modules=['pyaccess/pyaccess', 'pyaccess/urbanaccess']
setup(
packages=packages,
py_modules=py_modules,
name='pyaccess',
version=extension_version,
ext_modules=[
Extension(
extension_name,
source_files,
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args
)
]
)
|
1e47c53e6c96007fe41834e9b0ba2602a6f0e860
|
setup.py
|
setup.py
|
# -*-coding:utf-8-*-
from setuptools import setup
setup(
name='rocketchat_API',
version='0.6.5',
packages=['rocketchat_API', 'rocketchat_API.APIExceptions'],
url='https://github.com/jadolg/rocketchat_API',
license='MIT',
author='Jorge Alberto Díaz Orozco',
author_email='diazorozcoj@gmail.com',
description='Python API wrapper for Rocket.Chat',
long_description=open("README.md", "r").read(),
install_requires=(
'requests',
)
)
|
# -*-coding:utf-8-*-
from setuptools import setup
setup(
name='rocketchat_API',
version='0.6.6',
packages=['rocketchat_API', 'rocketchat_API.APIExceptions'],
url='https://github.com/jadolg/rocketchat_API',
license='MIT',
author='Jorge Alberto Díaz Orozco',
author_email='diazorozcoj@gmail.com',
description='Python API wrapper for Rocket.Chat',
long_description=open("README.md", "r").read(),
install_requires=(
'requests',
)
)
|
Add for merge with groups.listAll
|
Add for merge with groups.listAll
|
Python
|
mit
|
jadolg/rocketchat_API
|
# -*-coding:utf-8-*-
from setuptools import setup
setup(
name='rocketchat_API',
version='0.6.5',
packages=['rocketchat_API', 'rocketchat_API.APIExceptions'],
url='https://github.com/jadolg/rocketchat_API',
license='MIT',
author='Jorge Alberto Díaz Orozco',
author_email='diazorozcoj@gmail.com',
description='Python API wrapper for Rocket.Chat',
long_description=open("README.md", "r").read(),
install_requires=(
'requests',
)
)
Add for merge with groups.listAll
|
# -*-coding:utf-8-*-
from setuptools import setup
setup(
name='rocketchat_API',
version='0.6.6',
packages=['rocketchat_API', 'rocketchat_API.APIExceptions'],
url='https://github.com/jadolg/rocketchat_API',
license='MIT',
author='Jorge Alberto Díaz Orozco',
author_email='diazorozcoj@gmail.com',
description='Python API wrapper for Rocket.Chat',
long_description=open("README.md", "r").read(),
install_requires=(
'requests',
)
)
|
<commit_before># -*-coding:utf-8-*-
from setuptools import setup
setup(
name='rocketchat_API',
version='0.6.5',
packages=['rocketchat_API', 'rocketchat_API.APIExceptions'],
url='https://github.com/jadolg/rocketchat_API',
license='MIT',
author='Jorge Alberto Díaz Orozco',
author_email='diazorozcoj@gmail.com',
description='Python API wrapper for Rocket.Chat',
long_description=open("README.md", "r").read(),
install_requires=(
'requests',
)
)
<commit_msg>Add for merge with groups.listAll<commit_after>
|
# -*-coding:utf-8-*-
from setuptools import setup
setup(
name='rocketchat_API',
version='0.6.6',
packages=['rocketchat_API', 'rocketchat_API.APIExceptions'],
url='https://github.com/jadolg/rocketchat_API',
license='MIT',
author='Jorge Alberto Díaz Orozco',
author_email='diazorozcoj@gmail.com',
description='Python API wrapper for Rocket.Chat',
long_description=open("README.md", "r").read(),
install_requires=(
'requests',
)
)
|
# -*-coding:utf-8-*-
from setuptools import setup
setup(
name='rocketchat_API',
version='0.6.5',
packages=['rocketchat_API', 'rocketchat_API.APIExceptions'],
url='https://github.com/jadolg/rocketchat_API',
license='MIT',
author='Jorge Alberto Díaz Orozco',
author_email='diazorozcoj@gmail.com',
description='Python API wrapper for Rocket.Chat',
long_description=open("README.md", "r").read(),
install_requires=(
'requests',
)
)
Add for merge with groups.listAll# -*-coding:utf-8-*-
from setuptools import setup
setup(
name='rocketchat_API',
version='0.6.6',
packages=['rocketchat_API', 'rocketchat_API.APIExceptions'],
url='https://github.com/jadolg/rocketchat_API',
license='MIT',
author='Jorge Alberto Díaz Orozco',
author_email='diazorozcoj@gmail.com',
description='Python API wrapper for Rocket.Chat',
long_description=open("README.md", "r").read(),
install_requires=(
'requests',
)
)
|
<commit_before># -*-coding:utf-8-*-
from setuptools import setup
setup(
name='rocketchat_API',
version='0.6.5',
packages=['rocketchat_API', 'rocketchat_API.APIExceptions'],
url='https://github.com/jadolg/rocketchat_API',
license='MIT',
author='Jorge Alberto Díaz Orozco',
author_email='diazorozcoj@gmail.com',
description='Python API wrapper for Rocket.Chat',
long_description=open("README.md", "r").read(),
install_requires=(
'requests',
)
)
<commit_msg>Add for merge with groups.listAll<commit_after># -*-coding:utf-8-*-
from setuptools import setup
setup(
name='rocketchat_API',
version='0.6.6',
packages=['rocketchat_API', 'rocketchat_API.APIExceptions'],
url='https://github.com/jadolg/rocketchat_API',
license='MIT',
author='Jorge Alberto Díaz Orozco',
author_email='diazorozcoj@gmail.com',
description='Python API wrapper for Rocket.Chat',
long_description=open("README.md", "r").read(),
install_requires=(
'requests',
)
)
|
e04417d0811fba4e36ca16e3c731f35033be09f9
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
VERSION = '0.1.0dev'
setup(
name='Brewmeister',
version=VERSION,
long_description=open('README.rst').read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['bin/brewmeister'],
install_requires=[
'Babel>=1.3',
'docutils>=0.11',
'Flask>=0.10.1',
'Flask-Babel>=0.9',
'Flask-Cache>=0.12',
'Flask-PyMongo>=0.3.0',
'Flask-Script>=0.6.3',
'fysom>=1.0.14',
'jsonschema>=2.3.0',
'pyserial>=2.7',
'reportlab>=2.7',
]
)
|
import os
import glob
from setuptools import setup, find_packages
VERSION = '0.1.0dev'
def mo_files():
linguas = glob.glob('brew/translations/*/LC_MESSAGES/messages.mo')
lpaths = [os.path.dirname(d) for d in linguas]
return zip(lpaths, [[l] for l in linguas])
# Data files to be installed after build time
data_files = mo_files()
setup(
name='Brewmeister',
version=VERSION,
long_description=open('README.rst').read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['bin/brewmeister'],
data_files=data_files,
install_requires=[
'Babel>=1.3',
'docutils>=0.11',
'Flask>=0.10.1',
'Flask-Babel>=0.9',
'Flask-Cache>=0.12',
'Flask-PyMongo>=0.3.0',
'Flask-Script>=0.6.3',
'fysom>=1.0.14',
'jsonschema>=2.3.0',
'pyserial>=2.7',
'reportlab>=2.7',
]
)
|
Install .mo catalogs if available
|
Install .mo catalogs if available
|
Python
|
mit
|
brewpeople/brewmeister,brewpeople/brewmeister,brewpeople/brewmeister
|
from setuptools import setup, find_packages
VERSION = '0.1.0dev'
setup(
name='Brewmeister',
version=VERSION,
long_description=open('README.rst').read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['bin/brewmeister'],
install_requires=[
'Babel>=1.3',
'docutils>=0.11',
'Flask>=0.10.1',
'Flask-Babel>=0.9',
'Flask-Cache>=0.12',
'Flask-PyMongo>=0.3.0',
'Flask-Script>=0.6.3',
'fysom>=1.0.14',
'jsonschema>=2.3.0',
'pyserial>=2.7',
'reportlab>=2.7',
]
)
Install .mo catalogs if available
|
import os
import glob
from setuptools import setup, find_packages
VERSION = '0.1.0dev'
def mo_files():
linguas = glob.glob('brew/translations/*/LC_MESSAGES/messages.mo')
lpaths = [os.path.dirname(d) for d in linguas]
return zip(lpaths, [[l] for l in linguas])
# Data files to be installed after build time
data_files = mo_files()
setup(
name='Brewmeister',
version=VERSION,
long_description=open('README.rst').read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['bin/brewmeister'],
data_files=data_files,
install_requires=[
'Babel>=1.3',
'docutils>=0.11',
'Flask>=0.10.1',
'Flask-Babel>=0.9',
'Flask-Cache>=0.12',
'Flask-PyMongo>=0.3.0',
'Flask-Script>=0.6.3',
'fysom>=1.0.14',
'jsonschema>=2.3.0',
'pyserial>=2.7',
'reportlab>=2.7',
]
)
|
<commit_before>from setuptools import setup, find_packages
VERSION = '0.1.0dev'
setup(
name='Brewmeister',
version=VERSION,
long_description=open('README.rst').read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['bin/brewmeister'],
install_requires=[
'Babel>=1.3',
'docutils>=0.11',
'Flask>=0.10.1',
'Flask-Babel>=0.9',
'Flask-Cache>=0.12',
'Flask-PyMongo>=0.3.0',
'Flask-Script>=0.6.3',
'fysom>=1.0.14',
'jsonschema>=2.3.0',
'pyserial>=2.7',
'reportlab>=2.7',
]
)
<commit_msg>Install .mo catalogs if available<commit_after>
|
import os
import glob
from setuptools import setup, find_packages
VERSION = '0.1.0dev'
def mo_files():
linguas = glob.glob('brew/translations/*/LC_MESSAGES/messages.mo')
lpaths = [os.path.dirname(d) for d in linguas]
return zip(lpaths, [[l] for l in linguas])
# Data files to be installed after build time
data_files = mo_files()
setup(
name='Brewmeister',
version=VERSION,
long_description=open('README.rst').read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['bin/brewmeister'],
data_files=data_files,
install_requires=[
'Babel>=1.3',
'docutils>=0.11',
'Flask>=0.10.1',
'Flask-Babel>=0.9',
'Flask-Cache>=0.12',
'Flask-PyMongo>=0.3.0',
'Flask-Script>=0.6.3',
'fysom>=1.0.14',
'jsonschema>=2.3.0',
'pyserial>=2.7',
'reportlab>=2.7',
]
)
|
from setuptools import setup, find_packages
VERSION = '0.1.0dev'
setup(
name='Brewmeister',
version=VERSION,
long_description=open('README.rst').read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['bin/brewmeister'],
install_requires=[
'Babel>=1.3',
'docutils>=0.11',
'Flask>=0.10.1',
'Flask-Babel>=0.9',
'Flask-Cache>=0.12',
'Flask-PyMongo>=0.3.0',
'Flask-Script>=0.6.3',
'fysom>=1.0.14',
'jsonschema>=2.3.0',
'pyserial>=2.7',
'reportlab>=2.7',
]
)
Install .mo catalogs if availableimport os
import glob
from setuptools import setup, find_packages
VERSION = '0.1.0dev'
def mo_files():
linguas = glob.glob('brew/translations/*/LC_MESSAGES/messages.mo')
lpaths = [os.path.dirname(d) for d in linguas]
return zip(lpaths, [[l] for l in linguas])
# Data files to be installed after build time
data_files = mo_files()
setup(
name='Brewmeister',
version=VERSION,
long_description=open('README.rst').read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['bin/brewmeister'],
data_files=data_files,
install_requires=[
'Babel>=1.3',
'docutils>=0.11',
'Flask>=0.10.1',
'Flask-Babel>=0.9',
'Flask-Cache>=0.12',
'Flask-PyMongo>=0.3.0',
'Flask-Script>=0.6.3',
'fysom>=1.0.14',
'jsonschema>=2.3.0',
'pyserial>=2.7',
'reportlab>=2.7',
]
)
|
<commit_before>from setuptools import setup, find_packages
VERSION = '0.1.0dev'
setup(
name='Brewmeister',
version=VERSION,
long_description=open('README.rst').read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['bin/brewmeister'],
install_requires=[
'Babel>=1.3',
'docutils>=0.11',
'Flask>=0.10.1',
'Flask-Babel>=0.9',
'Flask-Cache>=0.12',
'Flask-PyMongo>=0.3.0',
'Flask-Script>=0.6.3',
'fysom>=1.0.14',
'jsonschema>=2.3.0',
'pyserial>=2.7',
'reportlab>=2.7',
]
)
<commit_msg>Install .mo catalogs if available<commit_after>import os
import glob
from setuptools import setup, find_packages
VERSION = '0.1.0dev'
def mo_files():
linguas = glob.glob('brew/translations/*/LC_MESSAGES/messages.mo')
lpaths = [os.path.dirname(d) for d in linguas]
return zip(lpaths, [[l] for l in linguas])
# Data files to be installed after build time
data_files = mo_files()
setup(
name='Brewmeister',
version=VERSION,
long_description=open('README.rst').read(),
packages=find_packages(),
include_package_data=True,
zip_safe=False,
scripts=['bin/brewmeister'],
data_files=data_files,
install_requires=[
'Babel>=1.3',
'docutils>=0.11',
'Flask>=0.10.1',
'Flask-Babel>=0.9',
'Flask-Cache>=0.12',
'Flask-PyMongo>=0.3.0',
'Flask-Script>=0.6.3',
'fysom>=1.0.14',
'jsonschema>=2.3.0',
'pyserial>=2.7',
'reportlab>=2.7',
]
)
|
24a6ff064036248043ff609ec7ba1925832219c4
|
setup.py
|
setup.py
|
from setuptools import setup
from downstream_node import __version__
setup(
name='downstream-node',
version=__version__,
packages=['downstream_node'],
url='',
license='',
author='Storj Labs',
author_email='info@storj.io',
description='',
install_requires=[
'flask',
'mysql-python',
'flask-sqlalchemy',
'heartbeat==0.1.2',
],
dependency_links=[
'https://github.com/Storj/heartbeat/archive/v0.1.2.tar.gz#egg=heartbeat-0.1.2'
],
)
|
import sys
from setuptools import setup
from downstream_node import __version__
# Reqirements for all versions of Python
install_requires = [
'flask',
'pymysql',
'flask-sqlalchemy',
'heartbeat==0.1.2',
]
# Requirements for Python 2
if sys.version_info < (3,):
extras = [
'mysql-python',
]
install_requires.extend(extras)
setup(
name='downstream-node',
version=__version__,
packages=['downstream_node'],
url='',
license='',
author='Storj Labs',
author_email='info@storj.io',
description='',
install_requires=install_requires,
dependency_links=[
'git+https://github.com/Storj/heartbeat.git@v0.1.2#egg=heartbeat-0.1.2'
],
)
|
Add proper dependency links and install_requires lines for Py3 and Py2 support
|
Add proper dependency links and install_requires lines for Py3 and Py2 support
|
Python
|
mit
|
Storj/downstream-node,Storj/downstream-node
|
from setuptools import setup
from downstream_node import __version__
setup(
name='downstream-node',
version=__version__,
packages=['downstream_node'],
url='',
license='',
author='Storj Labs',
author_email='info@storj.io',
description='',
install_requires=[
'flask',
'mysql-python',
'flask-sqlalchemy',
'heartbeat==0.1.2',
],
dependency_links=[
'https://github.com/Storj/heartbeat/archive/v0.1.2.tar.gz#egg=heartbeat-0.1.2'
],
)
Add proper dependency links and install_requires lines for Py3 and Py2 support
|
import sys
from setuptools import setup
from downstream_node import __version__
# Reqirements for all versions of Python
install_requires = [
'flask',
'pymysql',
'flask-sqlalchemy',
'heartbeat==0.1.2',
]
# Requirements for Python 2
if sys.version_info < (3,):
extras = [
'mysql-python',
]
install_requires.extend(extras)
setup(
name='downstream-node',
version=__version__,
packages=['downstream_node'],
url='',
license='',
author='Storj Labs',
author_email='info@storj.io',
description='',
install_requires=install_requires,
dependency_links=[
'git+https://github.com/Storj/heartbeat.git@v0.1.2#egg=heartbeat-0.1.2'
],
)
|
<commit_before>from setuptools import setup
from downstream_node import __version__
setup(
name='downstream-node',
version=__version__,
packages=['downstream_node'],
url='',
license='',
author='Storj Labs',
author_email='info@storj.io',
description='',
install_requires=[
'flask',
'mysql-python',
'flask-sqlalchemy',
'heartbeat==0.1.2',
],
dependency_links=[
'https://github.com/Storj/heartbeat/archive/v0.1.2.tar.gz#egg=heartbeat-0.1.2'
],
)
<commit_msg>Add proper dependency links and install_requires lines for Py3 and Py2 support<commit_after>
|
import sys
from setuptools import setup
from downstream_node import __version__
# Reqirements for all versions of Python
install_requires = [
'flask',
'pymysql',
'flask-sqlalchemy',
'heartbeat==0.1.2',
]
# Requirements for Python 2
if sys.version_info < (3,):
extras = [
'mysql-python',
]
install_requires.extend(extras)
setup(
name='downstream-node',
version=__version__,
packages=['downstream_node'],
url='',
license='',
author='Storj Labs',
author_email='info@storj.io',
description='',
install_requires=install_requires,
dependency_links=[
'git+https://github.com/Storj/heartbeat.git@v0.1.2#egg=heartbeat-0.1.2'
],
)
|
from setuptools import setup
from downstream_node import __version__
setup(
name='downstream-node',
version=__version__,
packages=['downstream_node'],
url='',
license='',
author='Storj Labs',
author_email='info@storj.io',
description='',
install_requires=[
'flask',
'mysql-python',
'flask-sqlalchemy',
'heartbeat==0.1.2',
],
dependency_links=[
'https://github.com/Storj/heartbeat/archive/v0.1.2.tar.gz#egg=heartbeat-0.1.2'
],
)
Add proper dependency links and install_requires lines for Py3 and Py2 supportimport sys
from setuptools import setup
from downstream_node import __version__
# Reqirements for all versions of Python
install_requires = [
'flask',
'pymysql',
'flask-sqlalchemy',
'heartbeat==0.1.2',
]
# Requirements for Python 2
if sys.version_info < (3,):
extras = [
'mysql-python',
]
install_requires.extend(extras)
setup(
name='downstream-node',
version=__version__,
packages=['downstream_node'],
url='',
license='',
author='Storj Labs',
author_email='info@storj.io',
description='',
install_requires=install_requires,
dependency_links=[
'git+https://github.com/Storj/heartbeat.git@v0.1.2#egg=heartbeat-0.1.2'
],
)
|
<commit_before>from setuptools import setup
from downstream_node import __version__
setup(
name='downstream-node',
version=__version__,
packages=['downstream_node'],
url='',
license='',
author='Storj Labs',
author_email='info@storj.io',
description='',
install_requires=[
'flask',
'mysql-python',
'flask-sqlalchemy',
'heartbeat==0.1.2',
],
dependency_links=[
'https://github.com/Storj/heartbeat/archive/v0.1.2.tar.gz#egg=heartbeat-0.1.2'
],
)
<commit_msg>Add proper dependency links and install_requires lines for Py3 and Py2 support<commit_after>import sys
from setuptools import setup
from downstream_node import __version__
# Reqirements for all versions of Python
install_requires = [
'flask',
'pymysql',
'flask-sqlalchemy',
'heartbeat==0.1.2',
]
# Requirements for Python 2
if sys.version_info < (3,):
extras = [
'mysql-python',
]
install_requires.extend(extras)
setup(
name='downstream-node',
version=__version__,
packages=['downstream_node'],
url='',
license='',
author='Storj Labs',
author_email='info@storj.io',
description='',
install_requires=install_requires,
dependency_links=[
'git+https://github.com/Storj/heartbeat.git@v0.1.2#egg=heartbeat-0.1.2'
],
)
|
3a9d53fd1ad0687a5ed3564c44a7624488a83d4b
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '1.2',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.2',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
classifiers = []
)
|
from distutils.core import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '1.2.1',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.2.1',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
classifiers = []
)
|
Update the release following the fixes to the links.
|
Update the release following the fixes to the links.
|
Python
|
mit
|
pwcazenave/PyFVCOM
|
from distutils.core import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '1.2',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.2',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
classifiers = []
)
Update the release following the fixes to the links.
|
from distutils.core import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '1.2.1',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.2.1',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
classifiers = []
)
|
<commit_before>from distutils.core import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '1.2',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.2',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
classifiers = []
)
<commit_msg>Update the release following the fixes to the links.<commit_after>
|
from distutils.core import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '1.2.1',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.2.1',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
classifiers = []
)
|
from distutils.core import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '1.2',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.2',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
classifiers = []
)
Update the release following the fixes to the links.from distutils.core import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '1.2.1',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.2.1',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
classifiers = []
)
|
<commit_before>from distutils.core import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '1.2',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.2',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
classifiers = []
)
<commit_msg>Update the release following the fixes to the links.<commit_after>from distutils.core import setup
setup(
name = 'PyFVCOM',
packages = ['PyFVCOM'],
version = '1.2.1',
description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."),
author = 'Pierre Cazenave',
author_email = 'pica@pml.ac.uk',
url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM',
download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.2.1',
keywords = ['fvcom', 'unstructured grid', 'mesh'],
license = 'MIT',
platforms = 'any',
classifiers = []
)
|
4cf871af11eb08b3b5b8671c4b5042c6f9f2f344
|
tests/test__pycompat.py
|
tests/test__pycompat.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask_distance._pycompat
def test_irange():
r = dask_distance._pycompat.irange(5)
assert not isinstance(r, list)
assert list(r) == [0, 1, 2, 3, 4]
|
Add a basic test for irange
|
Add a basic test for irange
Make sure `irange` is there, it doesn't return a list, and it acts like
`range` on some test arguments.
|
Python
|
bsd-3-clause
|
jakirkham/dask-distance
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
Add a basic test for irange
Make sure `irange` is there, it doesn't return a list, and it acts like
`range` on some test arguments.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask_distance._pycompat
def test_irange():
r = dask_distance._pycompat.irange(5)
assert not isinstance(r, list)
assert list(r) == [0, 1, 2, 3, 4]
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
<commit_msg>Add a basic test for irange
Make sure `irange` is there, it doesn't return a list, and it acts like
`range` on some test arguments.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask_distance._pycompat
def test_irange():
r = dask_distance._pycompat.irange(5)
assert not isinstance(r, list)
assert list(r) == [0, 1, 2, 3, 4]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
Add a basic test for irange
Make sure `irange` is there, it doesn't return a list, and it acts like
`range` on some test arguments.#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask_distance._pycompat
def test_irange():
r = dask_distance._pycompat.irange(5)
assert not isinstance(r, list)
assert list(r) == [0, 1, 2, 3, 4]
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
<commit_msg>Add a basic test for irange
Make sure `irange` is there, it doesn't return a list, and it acts like
`range` on some test arguments.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import dask_distance._pycompat
def test_irange():
r = dask_distance._pycompat.irange(5)
assert not isinstance(r, list)
assert list(r) == [0, 1, 2, 3, 4]
|
5cf5f6c16f8430cf1939b3775982b9dabbdc4123
|
setup.py
|
setup.py
|
from setuptools import setup
def readme():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='comment_parser',
version='1.2.3',
description='Parse comments from various source files.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Documentation',
'License :: OSI Approved :: MIT License'
],
url='http://github.com/jeanralphaviles/comment_parser',
author='Jean-Ralph Aviles',
author_email='jeanralph.aviles+pypi@gmail.com',
license='MIT',
long_description=readme(),
long_description_content_type='text/markdown',
packages=['comment_parser', 'comment_parser.parsers'],
install_requires=['python-magic==0.4.18'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
from setuptools import setup
def readme():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='comment_parser',
version='1.2.3',
description='Parse comments from various source files.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Documentation',
'License :: OSI Approved :: MIT License'
],
url='http://github.com/jeanralphaviles/comment_parser',
author='Jean-Ralph Aviles',
author_email='jeanralph.aviles+pypi@gmail.com',
license='MIT',
long_description=readme(),
long_description_content_type='text/markdown',
packages=['comment_parser', 'comment_parser.parsers'],
install_requires=['python-magic==0.4.24'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
Update python-magic dependency to 0.4.24.
|
comment_parser: Update python-magic dependency to 0.4.24.
|
Python
|
mit
|
jeanralphaviles/comment_parser
|
from setuptools import setup
def readme():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='comment_parser',
version='1.2.3',
description='Parse comments from various source files.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Documentation',
'License :: OSI Approved :: MIT License'
],
url='http://github.com/jeanralphaviles/comment_parser',
author='Jean-Ralph Aviles',
author_email='jeanralph.aviles+pypi@gmail.com',
license='MIT',
long_description=readme(),
long_description_content_type='text/markdown',
packages=['comment_parser', 'comment_parser.parsers'],
install_requires=['python-magic==0.4.18'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
comment_parser: Update python-magic dependency to 0.4.24.
|
from setuptools import setup
def readme():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='comment_parser',
version='1.2.3',
description='Parse comments from various source files.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Documentation',
'License :: OSI Approved :: MIT License'
],
url='http://github.com/jeanralphaviles/comment_parser',
author='Jean-Ralph Aviles',
author_email='jeanralph.aviles+pypi@gmail.com',
license='MIT',
long_description=readme(),
long_description_content_type='text/markdown',
packages=['comment_parser', 'comment_parser.parsers'],
install_requires=['python-magic==0.4.24'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
<commit_before>from setuptools import setup
def readme():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='comment_parser',
version='1.2.3',
description='Parse comments from various source files.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Documentation',
'License :: OSI Approved :: MIT License'
],
url='http://github.com/jeanralphaviles/comment_parser',
author='Jean-Ralph Aviles',
author_email='jeanralph.aviles+pypi@gmail.com',
license='MIT',
long_description=readme(),
long_description_content_type='text/markdown',
packages=['comment_parser', 'comment_parser.parsers'],
install_requires=['python-magic==0.4.18'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
<commit_msg>comment_parser: Update python-magic dependency to 0.4.24.<commit_after>
|
from setuptools import setup
def readme():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='comment_parser',
version='1.2.3',
description='Parse comments from various source files.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Documentation',
'License :: OSI Approved :: MIT License'
],
url='http://github.com/jeanralphaviles/comment_parser',
author='Jean-Ralph Aviles',
author_email='jeanralph.aviles+pypi@gmail.com',
license='MIT',
long_description=readme(),
long_description_content_type='text/markdown',
packages=['comment_parser', 'comment_parser.parsers'],
install_requires=['python-magic==0.4.24'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
from setuptools import setup
def readme():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='comment_parser',
version='1.2.3',
description='Parse comments from various source files.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Documentation',
'License :: OSI Approved :: MIT License'
],
url='http://github.com/jeanralphaviles/comment_parser',
author='Jean-Ralph Aviles',
author_email='jeanralph.aviles+pypi@gmail.com',
license='MIT',
long_description=readme(),
long_description_content_type='text/markdown',
packages=['comment_parser', 'comment_parser.parsers'],
install_requires=['python-magic==0.4.18'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
comment_parser: Update python-magic dependency to 0.4.24.from setuptools import setup
def readme():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='comment_parser',
version='1.2.3',
description='Parse comments from various source files.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Documentation',
'License :: OSI Approved :: MIT License'
],
url='http://github.com/jeanralphaviles/comment_parser',
author='Jean-Ralph Aviles',
author_email='jeanralph.aviles+pypi@gmail.com',
license='MIT',
long_description=readme(),
long_description_content_type='text/markdown',
packages=['comment_parser', 'comment_parser.parsers'],
install_requires=['python-magic==0.4.24'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
<commit_before>from setuptools import setup
def readme():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='comment_parser',
version='1.2.3',
description='Parse comments from various source files.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Documentation',
'License :: OSI Approved :: MIT License'
],
url='http://github.com/jeanralphaviles/comment_parser',
author='Jean-Ralph Aviles',
author_email='jeanralph.aviles+pypi@gmail.com',
license='MIT',
long_description=readme(),
long_description_content_type='text/markdown',
packages=['comment_parser', 'comment_parser.parsers'],
install_requires=['python-magic==0.4.18'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
<commit_msg>comment_parser: Update python-magic dependency to 0.4.24.<commit_after>from setuptools import setup
def readme():
with open('README.md') as readme_file:
return readme_file.read()
setup(
name='comment_parser',
version='1.2.3',
description='Parse comments from various source files.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Documentation',
'License :: OSI Approved :: MIT License'
],
url='http://github.com/jeanralphaviles/comment_parser',
author='Jean-Ralph Aviles',
author_email='jeanralph.aviles+pypi@gmail.com',
license='MIT',
long_description=readme(),
long_description_content_type='text/markdown',
packages=['comment_parser', 'comment_parser.parsers'],
install_requires=['python-magic==0.4.24'],
test_suite='nose.collector',
tests_require=['nose'],
zip_safe=False)
|
87b9e240f3065fcd1c057ccf8698c2e824d113a9
|
ixprofile_client/tests/__init__.py
|
ixprofile_client/tests/__init__.py
|
"""
Unit tests
"""
import django
from django.conf import settings
# Configure Django as required by some of the Gherkin steps
settings.configure(
CACHES={'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}},
PROFILE_SERVER='dummy_server',
PROFILE_SERVER_KEY='mock_app',
PROFILE_SERVER_SECRET='dummy_secret',
SSL_CA_FILE=None,
DEBUG=True)
django.setup() # pylint:disable=no-member
|
"""
Unit tests
"""
import django
from django.conf import settings
# Configure Django as required by some of the Gherkin steps
settings.configure(
CACHES={'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'social.apps.django_app.default',
),
PROFILE_SERVER='dummy_server',
PROFILE_SERVER_KEY='mock_app',
PROFILE_SERVER_SECRET='dummy_secret',
SSL_CA_FILE=None,
DEBUG=True)
django.setup() # pylint:disable=no-member
|
Fix tests under Django 1.9
|
Fix tests under Django 1.9
|
Python
|
mit
|
infoxchange/ixprofile-client,infoxchange/ixprofile-client
|
"""
Unit tests
"""
import django
from django.conf import settings
# Configure Django as required by some of the Gherkin steps
settings.configure(
CACHES={'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}},
PROFILE_SERVER='dummy_server',
PROFILE_SERVER_KEY='mock_app',
PROFILE_SERVER_SECRET='dummy_secret',
SSL_CA_FILE=None,
DEBUG=True)
django.setup() # pylint:disable=no-member
Fix tests under Django 1.9
|
"""
Unit tests
"""
import django
from django.conf import settings
# Configure Django as required by some of the Gherkin steps
settings.configure(
CACHES={'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'social.apps.django_app.default',
),
PROFILE_SERVER='dummy_server',
PROFILE_SERVER_KEY='mock_app',
PROFILE_SERVER_SECRET='dummy_secret',
SSL_CA_FILE=None,
DEBUG=True)
django.setup() # pylint:disable=no-member
|
<commit_before>"""
Unit tests
"""
import django
from django.conf import settings
# Configure Django as required by some of the Gherkin steps
settings.configure(
CACHES={'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}},
PROFILE_SERVER='dummy_server',
PROFILE_SERVER_KEY='mock_app',
PROFILE_SERVER_SECRET='dummy_secret',
SSL_CA_FILE=None,
DEBUG=True)
django.setup() # pylint:disable=no-member
<commit_msg>Fix tests under Django 1.9<commit_after>
|
"""
Unit tests
"""
import django
from django.conf import settings
# Configure Django as required by some of the Gherkin steps
settings.configure(
CACHES={'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'social.apps.django_app.default',
),
PROFILE_SERVER='dummy_server',
PROFILE_SERVER_KEY='mock_app',
PROFILE_SERVER_SECRET='dummy_secret',
SSL_CA_FILE=None,
DEBUG=True)
django.setup() # pylint:disable=no-member
|
"""
Unit tests
"""
import django
from django.conf import settings
# Configure Django as required by some of the Gherkin steps
settings.configure(
CACHES={'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}},
PROFILE_SERVER='dummy_server',
PROFILE_SERVER_KEY='mock_app',
PROFILE_SERVER_SECRET='dummy_secret',
SSL_CA_FILE=None,
DEBUG=True)
django.setup() # pylint:disable=no-member
Fix tests under Django 1.9"""
Unit tests
"""
import django
from django.conf import settings
# Configure Django as required by some of the Gherkin steps
settings.configure(
CACHES={'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'social.apps.django_app.default',
),
PROFILE_SERVER='dummy_server',
PROFILE_SERVER_KEY='mock_app',
PROFILE_SERVER_SECRET='dummy_secret',
SSL_CA_FILE=None,
DEBUG=True)
django.setup() # pylint:disable=no-member
|
<commit_before>"""
Unit tests
"""
import django
from django.conf import settings
# Configure Django as required by some of the Gherkin steps
settings.configure(
CACHES={'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}},
PROFILE_SERVER='dummy_server',
PROFILE_SERVER_KEY='mock_app',
PROFILE_SERVER_SECRET='dummy_secret',
SSL_CA_FILE=None,
DEBUG=True)
django.setup() # pylint:disable=no-member
<commit_msg>Fix tests under Django 1.9<commit_after>"""
Unit tests
"""
import django
from django.conf import settings
# Configure Django as required by some of the Gherkin steps
settings.configure(
CACHES={'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}},
INSTALLED_APPS=(
'django.contrib.auth',
'django.contrib.contenttypes',
'social.apps.django_app.default',
),
PROFILE_SERVER='dummy_server',
PROFILE_SERVER_KEY='mock_app',
PROFILE_SERVER_SECRET='dummy_secret',
SSL_CA_FILE=None,
DEBUG=True)
django.setup() # pylint:disable=no-member
|
6ab01b1e26184bf296cf58939db5299f07cd68f5
|
malcolm/modules/pmac/parts/__init__.py
|
malcolm/modules/pmac/parts/__init__.py
|
from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \
APartName, ARbv, AGroup
from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup
from .cspart import CSPart, AMri
from .pmacchildpart import PmacChildPart, AMri, APartName
from .pmacstatuspart import PmacStatusPart
from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName
from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup
from .motorpremovepart import MotorPreMovePart, APartName, AMri
# Expose a nice namespace
from malcolm.core import submodule_all
__all__ = submodule_all(globals())
|
from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \
APartName, ARbv, AGroup
from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup
from .cspart import CSPart, AMri
from .pmacchildpart import PmacChildPart, AMri, APartName
from .pmacstatuspart import PmacStatusPart
from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName
from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup
from .motorpremovepart import MotorPreMovePart, APartName, AMri
from .beamselectorpart import BeamSelectorPart
# Expose a nice namespace
from malcolm.core import submodule_all
__all__ = submodule_all(globals())
|
Add beamselectorpart to the PMAC module
|
Add beamselectorpart to the PMAC module
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \
APartName, ARbv, AGroup
from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup
from .cspart import CSPart, AMri
from .pmacchildpart import PmacChildPart, AMri, APartName
from .pmacstatuspart import PmacStatusPart
from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName
from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup
from .motorpremovepart import MotorPreMovePart, APartName, AMri
# Expose a nice namespace
from malcolm.core import submodule_all
__all__ = submodule_all(globals())
Add beamselectorpart to the PMAC module
|
from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \
APartName, ARbv, AGroup
from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup
from .cspart import CSPart, AMri
from .pmacchildpart import PmacChildPart, AMri, APartName
from .pmacstatuspart import PmacStatusPart
from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName
from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup
from .motorpremovepart import MotorPreMovePart, APartName, AMri
from .beamselectorpart import BeamSelectorPart
# Expose a nice namespace
from malcolm.core import submodule_all
__all__ = submodule_all(globals())
|
<commit_before>from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \
APartName, ARbv, AGroup
from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup
from .cspart import CSPart, AMri
from .pmacchildpart import PmacChildPart, AMri, APartName
from .pmacstatuspart import PmacStatusPart
from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName
from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup
from .motorpremovepart import MotorPreMovePart, APartName, AMri
# Expose a nice namespace
from malcolm.core import submodule_all
__all__ = submodule_all(globals())
<commit_msg>Add beamselectorpart to the PMAC module<commit_after>
|
from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \
APartName, ARbv, AGroup
from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup
from .cspart import CSPart, AMri
from .pmacchildpart import PmacChildPart, AMri, APartName
from .pmacstatuspart import PmacStatusPart
from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName
from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup
from .motorpremovepart import MotorPreMovePart, APartName, AMri
from .beamselectorpart import BeamSelectorPart
# Expose a nice namespace
from malcolm.core import submodule_all
__all__ = submodule_all(globals())
|
from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \
APartName, ARbv, AGroup
from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup
from .cspart import CSPart, AMri
from .pmacchildpart import PmacChildPart, AMri, APartName
from .pmacstatuspart import PmacStatusPart
from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName
from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup
from .motorpremovepart import MotorPreMovePart, APartName, AMri
# Expose a nice namespace
from malcolm.core import submodule_all
__all__ = submodule_all(globals())
Add beamselectorpart to the PMAC modulefrom .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \
APartName, ARbv, AGroup
from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup
from .cspart import CSPart, AMri
from .pmacchildpart import PmacChildPart, AMri, APartName
from .pmacstatuspart import PmacStatusPart
from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName
from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup
from .motorpremovepart import MotorPreMovePart, APartName, AMri
from .beamselectorpart import BeamSelectorPart
# Expose a nice namespace
from malcolm.core import submodule_all
__all__ = submodule_all(globals())
|
<commit_before>from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \
APartName, ARbv, AGroup
from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup
from .cspart import CSPart, AMri
from .pmacchildpart import PmacChildPart, AMri, APartName
from .pmacstatuspart import PmacStatusPart
from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName
from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup
from .motorpremovepart import MotorPreMovePart, APartName, AMri
# Expose a nice namespace
from malcolm.core import submodule_all
__all__ = submodule_all(globals())
<commit_msg>Add beamselectorpart to the PMAC module<commit_after>from .compoundmotorsinkportspart import CompoundMotorSinkPortsPart, \
APartName, ARbv, AGroup
from .cssourceportspart import CSSourcePortsPart, APartName, ARbv, AGroup
from .cspart import CSPart, AMri
from .pmacchildpart import PmacChildPart, AMri, APartName
from .pmacstatuspart import PmacStatusPart
from .pmactrajectorypart import PmacTrajectoryPart, AMri, APartName
from .rawmotorsinkportspart import RawMotorSinkPortsPart, AGroup
from .motorpremovepart import MotorPreMovePart, APartName, AMri
from .beamselectorpart import BeamSelectorPart
# Expose a nice namespace
from malcolm.core import submodule_all
__all__ = submodule_all(globals())
|
400978cac957684f1f5d1a19a585cc8ea7b4e616
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
version = '0.0.1'
setup(name='scoville',
version=version,
description="A tool for measureing tile latency.",
long_description=open('README.md').read(),
classifiers=[
# strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Utilities',
],
keywords='tile latency mvt',
author='Matt Amos, Mapzen',
author_email='matt.amos@mapzen.com',
url='https://github.com/tilezen/scoville',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyYAML',
'contextlib2',
'Shapely',
'pycurl',
'mapbox_vector_tile',
'psycopg2',
'boto3'
],
entry_points=dict(
console_scripts=[
'scoville = scoville.command:scoville_main',
]
)
)
|
from setuptools import setup, find_packages
version = '0.1.0'
setup(name='scoville',
version=version,
description="A tool for measureing tile latency.",
long_description=open('README.md').read(),
classifiers=[
# strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Utilities',
],
keywords='tile latency mvt',
author='Matt Amos, Mapzen',
author_email='matt.amos@mapzen.com',
url='https://github.com/tilezen/scoville',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyYAML',
'contextlib2',
'Shapely',
'pycurl',
'mapbox_vector_tile',
'psycopg2',
'boto3'
],
entry_points=dict(
console_scripts=[
'scoville = scoville.command:scoville_main',
]
)
)
|
Bump version for more usefulness.
|
Bump version for more usefulness.
|
Python
|
mit
|
tilezen/scoville,tilezen/scoville,tilezen/scoville
|
from setuptools import setup, find_packages
version = '0.0.1'
setup(name='scoville',
version=version,
description="A tool for measureing tile latency.",
long_description=open('README.md').read(),
classifiers=[
# strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Utilities',
],
keywords='tile latency mvt',
author='Matt Amos, Mapzen',
author_email='matt.amos@mapzen.com',
url='https://github.com/tilezen/scoville',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyYAML',
'contextlib2',
'Shapely',
'pycurl',
'mapbox_vector_tile',
'psycopg2',
'boto3'
],
entry_points=dict(
console_scripts=[
'scoville = scoville.command:scoville_main',
]
)
)
Bump version for more usefulness.
|
from setuptools import setup, find_packages
version = '0.1.0'
setup(name='scoville',
version=version,
description="A tool for measureing tile latency.",
long_description=open('README.md').read(),
classifiers=[
# strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Utilities',
],
keywords='tile latency mvt',
author='Matt Amos, Mapzen',
author_email='matt.amos@mapzen.com',
url='https://github.com/tilezen/scoville',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyYAML',
'contextlib2',
'Shapely',
'pycurl',
'mapbox_vector_tile',
'psycopg2',
'boto3'
],
entry_points=dict(
console_scripts=[
'scoville = scoville.command:scoville_main',
]
)
)
|
<commit_before>from setuptools import setup, find_packages
version = '0.0.1'
setup(name='scoville',
version=version,
description="A tool for measureing tile latency.",
long_description=open('README.md').read(),
classifiers=[
# strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Utilities',
],
keywords='tile latency mvt',
author='Matt Amos, Mapzen',
author_email='matt.amos@mapzen.com',
url='https://github.com/tilezen/scoville',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyYAML',
'contextlib2',
'Shapely',
'pycurl',
'mapbox_vector_tile',
'psycopg2',
'boto3'
],
entry_points=dict(
console_scripts=[
'scoville = scoville.command:scoville_main',
]
)
)
<commit_msg>Bump version for more usefulness.<commit_after>
|
from setuptools import setup, find_packages
version = '0.1.0'
setup(name='scoville',
version=version,
description="A tool for measureing tile latency.",
long_description=open('README.md').read(),
classifiers=[
# strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Utilities',
],
keywords='tile latency mvt',
author='Matt Amos, Mapzen',
author_email='matt.amos@mapzen.com',
url='https://github.com/tilezen/scoville',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyYAML',
'contextlib2',
'Shapely',
'pycurl',
'mapbox_vector_tile',
'psycopg2',
'boto3'
],
entry_points=dict(
console_scripts=[
'scoville = scoville.command:scoville_main',
]
)
)
|
from setuptools import setup, find_packages
version = '0.0.1'
setup(name='scoville',
version=version,
description="A tool for measureing tile latency.",
long_description=open('README.md').read(),
classifiers=[
# strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Utilities',
],
keywords='tile latency mvt',
author='Matt Amos, Mapzen',
author_email='matt.amos@mapzen.com',
url='https://github.com/tilezen/scoville',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyYAML',
'contextlib2',
'Shapely',
'pycurl',
'mapbox_vector_tile',
'psycopg2',
'boto3'
],
entry_points=dict(
console_scripts=[
'scoville = scoville.command:scoville_main',
]
)
)
Bump version for more usefulness.from setuptools import setup, find_packages
version = '0.1.0'
setup(name='scoville',
version=version,
description="A tool for measureing tile latency.",
long_description=open('README.md').read(),
classifiers=[
# strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Utilities',
],
keywords='tile latency mvt',
author='Matt Amos, Mapzen',
author_email='matt.amos@mapzen.com',
url='https://github.com/tilezen/scoville',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyYAML',
'contextlib2',
'Shapely',
'pycurl',
'mapbox_vector_tile',
'psycopg2',
'boto3'
],
entry_points=dict(
console_scripts=[
'scoville = scoville.command:scoville_main',
]
)
)
|
<commit_before>from setuptools import setup, find_packages
version = '0.0.1'
setup(name='scoville',
version=version,
description="A tool for measureing tile latency.",
long_description=open('README.md').read(),
classifiers=[
# strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Utilities',
],
keywords='tile latency mvt',
author='Matt Amos, Mapzen',
author_email='matt.amos@mapzen.com',
url='https://github.com/tilezen/scoville',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyYAML',
'contextlib2',
'Shapely',
'pycurl',
'mapbox_vector_tile',
'psycopg2',
'boto3'
],
entry_points=dict(
console_scripts=[
'scoville = scoville.command:scoville_main',
]
)
)
<commit_msg>Bump version for more usefulness.<commit_after>from setuptools import setup, find_packages
version = '0.1.0'
setup(name='scoville',
version=version,
description="A tool for measureing tile latency.",
long_description=open('README.md').read(),
classifiers=[
# strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Topic :: Utilities',
],
keywords='tile latency mvt',
author='Matt Amos, Mapzen',
author_email='matt.amos@mapzen.com',
url='https://github.com/tilezen/scoville',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyYAML',
'contextlib2',
'Shapely',
'pycurl',
'mapbox_vector_tile',
'psycopg2',
'boto3'
],
entry_points=dict(
console_scripts=[
'scoville = scoville.command:scoville_main',
]
)
)
|
cbb0bd366f829b2c917456256d178b54a2c9a735
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
setup_requires=['d2to1>=0.2.5', 'stsci.distutils>=0.3dev'],
d2to1=True,
use_2to3=True,
zip_safe=False
)
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
setup_requires=['d2to1>=0.2.5', 'stsci.distutils>=0.3dev'],
dependency_links=['http://stsdas.stsci.edu/download/packages'],
d2to1=True,
use_2to3=True,
zip_safe=False
)
|
Add dependency_links pointing to internal package index
|
Add dependency_links pointing to internal package index
git-svn-id: ae5d535d8549566df64d2c38e8f7097ffa427e83@13931 fe389314-cf27-0410-b35b-8c050e845b92
|
Python
|
bsd-3-clause
|
jhunkeler/acstools
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
setup_requires=['d2to1>=0.2.5', 'stsci.distutils>=0.3dev'],
d2to1=True,
use_2to3=True,
zip_safe=False
)
Add dependency_links pointing to internal package index
git-svn-id: ae5d535d8549566df64d2c38e8f7097ffa427e83@13931 fe389314-cf27-0410-b35b-8c050e845b92
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
setup_requires=['d2to1>=0.2.5', 'stsci.distutils>=0.3dev'],
dependency_links=['http://stsdas.stsci.edu/download/packages'],
d2to1=True,
use_2to3=True,
zip_safe=False
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
setup_requires=['d2to1>=0.2.5', 'stsci.distutils>=0.3dev'],
d2to1=True,
use_2to3=True,
zip_safe=False
)
<commit_msg>Add dependency_links pointing to internal package index
git-svn-id: ae5d535d8549566df64d2c38e8f7097ffa427e83@13931 fe389314-cf27-0410-b35b-8c050e845b92<commit_after>
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
setup_requires=['d2to1>=0.2.5', 'stsci.distutils>=0.3dev'],
dependency_links=['http://stsdas.stsci.edu/download/packages'],
d2to1=True,
use_2to3=True,
zip_safe=False
)
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
setup_requires=['d2to1>=0.2.5', 'stsci.distutils>=0.3dev'],
d2to1=True,
use_2to3=True,
zip_safe=False
)
Add dependency_links pointing to internal package index
git-svn-id: ae5d535d8549566df64d2c38e8f7097ffa427e83@13931 fe389314-cf27-0410-b35b-8c050e845b92#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
setup_requires=['d2to1>=0.2.5', 'stsci.distutils>=0.3dev'],
dependency_links=['http://stsdas.stsci.edu/download/packages'],
d2to1=True,
use_2to3=True,
zip_safe=False
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
setup_requires=['d2to1>=0.2.5', 'stsci.distutils>=0.3dev'],
d2to1=True,
use_2to3=True,
zip_safe=False
)
<commit_msg>Add dependency_links pointing to internal package index
git-svn-id: ae5d535d8549566df64d2c38e8f7097ffa427e83@13931 fe389314-cf27-0410-b35b-8c050e845b92<commit_after>#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup
setup(
setup_requires=['d2to1>=0.2.5', 'stsci.distutils>=0.3dev'],
dependency_links=['http://stsdas.stsci.edu/download/packages'],
d2to1=True,
use_2to3=True,
zip_safe=False
)
|
28b280bb04f806f614f6f2cd25ce779b551fef9e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
#
# Setup script for Django Evolution
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
from django_evolution import get_package_version, VERSION
def run_tests(*args):
import os
os.system('tests/runtests.py')
test.run_tests = run_tests
PACKAGE_NAME = 'django_evolution'
download_url = (
'http://downloads.reviewboard.org/releases/django-evolution/%s.%s/' %
(VERSION[0], VERSION[1]))
# Build the package
setup(
name=PACKAGE_NAME,
version=get_package_version(),
description='A database schema evolution tool for the Django web framework.',
url='http://code.google.com/p/django-evolution/',
author='Ben Khoo',
author_email='khoobks@westnet.com.au',
maintainer='Christian Hammond',
maintainer_email='christian@beanbaginc.com',
download_url=download_url,
packages=find_packages(exclude=['tests']),
install_requires=[
'Django>=1.4.10,<1.7.0',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
#!/usr/bin/env python
#
# Setup script for Django Evolution
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
from django_evolution import get_package_version, VERSION
def run_tests(*args):
import os
os.system('tests/runtests.py')
test.run_tests = run_tests
PACKAGE_NAME = 'django_evolution'
download_url = (
'http://downloads.reviewboard.org/releases/django-evolution/%s.%s/' %
(VERSION[0], VERSION[1]))
# Build the package
setup(
name=PACKAGE_NAME,
version=get_package_version(),
description='A database schema evolution tool for the Django web framework.',
url='http://code.google.com/p/django-evolution/',
author='Ben Khoo',
author_email='khoobks@westnet.com.au',
maintainer='Christian Hammond',
maintainer_email='christian@beanbaginc.com',
download_url=download_url,
packages=find_packages(exclude=['tests']),
install_requires=[
'Django>=1.4.10',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
Allow Django Evolution to install along with Django >= 1.7.
|
Allow Django Evolution to install along with Django >= 1.7.
As we're working toward some degree of compatibility with newer versions
of Django, we need to ease up on the version restriction. Now's a good
time to do so. Django Evolution no longer has an upper bound on the
version range.
|
Python
|
bsd-3-clause
|
beanbaginc/django-evolution
|
#!/usr/bin/env python
#
# Setup script for Django Evolution
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
from django_evolution import get_package_version, VERSION
def run_tests(*args):
import os
os.system('tests/runtests.py')
test.run_tests = run_tests
PACKAGE_NAME = 'django_evolution'
download_url = (
'http://downloads.reviewboard.org/releases/django-evolution/%s.%s/' %
(VERSION[0], VERSION[1]))
# Build the package
setup(
name=PACKAGE_NAME,
version=get_package_version(),
description='A database schema evolution tool for the Django web framework.',
url='http://code.google.com/p/django-evolution/',
author='Ben Khoo',
author_email='khoobks@westnet.com.au',
maintainer='Christian Hammond',
maintainer_email='christian@beanbaginc.com',
download_url=download_url,
packages=find_packages(exclude=['tests']),
install_requires=[
'Django>=1.4.10,<1.7.0',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Allow Django Evolution to install along with Django >= 1.7.
As we're working toward some degree of compatibility with newer versions
of Django, we need to ease up on the version restriction. Now's a good
time to do so. Django Evolution no longer has an upper bound on the
version range.
|
#!/usr/bin/env python
#
# Setup script for Django Evolution
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
from django_evolution import get_package_version, VERSION
def run_tests(*args):
import os
os.system('tests/runtests.py')
test.run_tests = run_tests
PACKAGE_NAME = 'django_evolution'
download_url = (
'http://downloads.reviewboard.org/releases/django-evolution/%s.%s/' %
(VERSION[0], VERSION[1]))
# Build the package
setup(
name=PACKAGE_NAME,
version=get_package_version(),
description='A database schema evolution tool for the Django web framework.',
url='http://code.google.com/p/django-evolution/',
author='Ben Khoo',
author_email='khoobks@westnet.com.au',
maintainer='Christian Hammond',
maintainer_email='christian@beanbaginc.com',
download_url=download_url,
packages=find_packages(exclude=['tests']),
install_requires=[
'Django>=1.4.10',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>#!/usr/bin/env python
#
# Setup script for Django Evolution
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
from django_evolution import get_package_version, VERSION
def run_tests(*args):
import os
os.system('tests/runtests.py')
test.run_tests = run_tests
PACKAGE_NAME = 'django_evolution'
download_url = (
'http://downloads.reviewboard.org/releases/django-evolution/%s.%s/' %
(VERSION[0], VERSION[1]))
# Build the package
setup(
name=PACKAGE_NAME,
version=get_package_version(),
description='A database schema evolution tool for the Django web framework.',
url='http://code.google.com/p/django-evolution/',
author='Ben Khoo',
author_email='khoobks@westnet.com.au',
maintainer='Christian Hammond',
maintainer_email='christian@beanbaginc.com',
download_url=download_url,
packages=find_packages(exclude=['tests']),
install_requires=[
'Django>=1.4.10,<1.7.0',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Allow Django Evolution to install along with Django >= 1.7.
As we're working toward some degree of compatibility with newer versions
of Django, we need to ease up on the version restriction. Now's a good
time to do so. Django Evolution no longer has an upper bound on the
version range.<commit_after>
|
#!/usr/bin/env python
#
# Setup script for Django Evolution
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
from django_evolution import get_package_version, VERSION
def run_tests(*args):
import os
os.system('tests/runtests.py')
test.run_tests = run_tests
PACKAGE_NAME = 'django_evolution'
download_url = (
'http://downloads.reviewboard.org/releases/django-evolution/%s.%s/' %
(VERSION[0], VERSION[1]))
# Build the package
setup(
name=PACKAGE_NAME,
version=get_package_version(),
description='A database schema evolution tool for the Django web framework.',
url='http://code.google.com/p/django-evolution/',
author='Ben Khoo',
author_email='khoobks@westnet.com.au',
maintainer='Christian Hammond',
maintainer_email='christian@beanbaginc.com',
download_url=download_url,
packages=find_packages(exclude=['tests']),
install_requires=[
'Django>=1.4.10',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
#!/usr/bin/env python
#
# Setup script for Django Evolution
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
from django_evolution import get_package_version, VERSION
def run_tests(*args):
import os
os.system('tests/runtests.py')
test.run_tests = run_tests
PACKAGE_NAME = 'django_evolution'
download_url = (
'http://downloads.reviewboard.org/releases/django-evolution/%s.%s/' %
(VERSION[0], VERSION[1]))
# Build the package
setup(
name=PACKAGE_NAME,
version=get_package_version(),
description='A database schema evolution tool for the Django web framework.',
url='http://code.google.com/p/django-evolution/',
author='Ben Khoo',
author_email='khoobks@westnet.com.au',
maintainer='Christian Hammond',
maintainer_email='christian@beanbaginc.com',
download_url=download_url,
packages=find_packages(exclude=['tests']),
install_requires=[
'Django>=1.4.10,<1.7.0',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Allow Django Evolution to install along with Django >= 1.7.
As we're working toward some degree of compatibility with newer versions
of Django, we need to ease up on the version restriction. Now's a good
time to do so. Django Evolution no longer has an upper bound on the
version range.#!/usr/bin/env python
#
# Setup script for Django Evolution
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
from django_evolution import get_package_version, VERSION
def run_tests(*args):
import os
os.system('tests/runtests.py')
test.run_tests = run_tests
PACKAGE_NAME = 'django_evolution'
download_url = (
'http://downloads.reviewboard.org/releases/django-evolution/%s.%s/' %
(VERSION[0], VERSION[1]))
# Build the package
setup(
name=PACKAGE_NAME,
version=get_package_version(),
description='A database schema evolution tool for the Django web framework.',
url='http://code.google.com/p/django-evolution/',
author='Ben Khoo',
author_email='khoobks@westnet.com.au',
maintainer='Christian Hammond',
maintainer_email='christian@beanbaginc.com',
download_url=download_url,
packages=find_packages(exclude=['tests']),
install_requires=[
'Django>=1.4.10',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>#!/usr/bin/env python
#
# Setup script for Django Evolution
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
from django_evolution import get_package_version, VERSION
def run_tests(*args):
import os
os.system('tests/runtests.py')
test.run_tests = run_tests
PACKAGE_NAME = 'django_evolution'
download_url = (
'http://downloads.reviewboard.org/releases/django-evolution/%s.%s/' %
(VERSION[0], VERSION[1]))
# Build the package
setup(
name=PACKAGE_NAME,
version=get_package_version(),
description='A database schema evolution tool for the Django web framework.',
url='http://code.google.com/p/django-evolution/',
author='Ben Khoo',
author_email='khoobks@westnet.com.au',
maintainer='Christian Hammond',
maintainer_email='christian@beanbaginc.com',
download_url=download_url,
packages=find_packages(exclude=['tests']),
install_requires=[
'Django>=1.4.10,<1.7.0',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Allow Django Evolution to install along with Django >= 1.7.
As we're working toward some degree of compatibility with newer versions
of Django, we need to ease up on the version restriction. Now's a good
time to do so. Django Evolution no longer has an upper bound on the
version range.<commit_after>#!/usr/bin/env python
#
# Setup script for Django Evolution
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
from setuptools.command.test import test
from django_evolution import get_package_version, VERSION
def run_tests(*args):
import os
os.system('tests/runtests.py')
test.run_tests = run_tests
PACKAGE_NAME = 'django_evolution'
download_url = (
'http://downloads.reviewboard.org/releases/django-evolution/%s.%s/' %
(VERSION[0], VERSION[1]))
# Build the package
setup(
name=PACKAGE_NAME,
version=get_package_version(),
description='A database schema evolution tool for the Django web framework.',
url='http://code.google.com/p/django-evolution/',
author='Ben Khoo',
author_email='khoobks@westnet.com.au',
maintainer='Christian Hammond',
maintainer_email='christian@beanbaginc.com',
download_url=download_url,
packages=find_packages(exclude=['tests']),
install_requires=[
'Django>=1.4.10',
],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
147197f5640f9c008b73832f6b15316e1966da1c
|
BlockServer/epics/archiver_wrapper.py
|
BlockServer/epics/archiver_wrapper.py
|
#This file is part of the ISIS IBEX application.
#Copyright (C) 2012-2016 Science & Technology Facilities Council.
#All rights reserved.
#
#This program is distributed in the hope that it will be useful.
#This program and the accompanying materials are made available under the
#terms of the Eclipse Public License v1.0 which accompanies this distribution.
#EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
#AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
#OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
#You should have received a copy of the Eclipse Public License v1.0
#along with this program; if not, you can obtain a copy from
#https://www.eclipse.org/org/documents/epl-v10.php or
#http://opensource.org/licenses/eclipse-1.0.php
import urllib2
class ArchiverWrapper(object):
def restart_archiver(self):
# Set to ignore proxy for localhost
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
urllib2.urlopen("http://localhost:4813/restart")
|
#This file is part of the ISIS IBEX application.
#Copyright (C) 2012-2016 Science & Technology Facilities Council.
#All rights reserved.
#
#This program is distributed in the hope that it will be useful.
#This program and the accompanying materials are made available under the
#terms of the Eclipse Public License v1.0 which accompanies this distribution.
#EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
#AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
#OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
#You should have received a copy of the Eclipse Public License v1.0
#along with this program; if not, you can obtain a copy from
#https://www.eclipse.org/org/documents/epl-v10.php or
#http://opensource.org/licenses/eclipse-1.0.php
import urllib2
class ArchiverWrapper(object):
def restart_archiver(self):
# Set to ignore proxy for localhost
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
res = urllib2.urlopen("http://localhost:4813/restart")
d = res.read()
|
Read returned page, just to make sure
|
Read returned page, just to make sure
|
Python
|
bsd-3-clause
|
ISISComputingGroup/EPICS-inst_servers,ISISComputingGroup/EPICS-inst_servers
|
#This file is part of the ISIS IBEX application.
#Copyright (C) 2012-2016 Science & Technology Facilities Council.
#All rights reserved.
#
#This program is distributed in the hope that it will be useful.
#This program and the accompanying materials are made available under the
#terms of the Eclipse Public License v1.0 which accompanies this distribution.
#EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
#AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
#OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
#You should have received a copy of the Eclipse Public License v1.0
#along with this program; if not, you can obtain a copy from
#https://www.eclipse.org/org/documents/epl-v10.php or
#http://opensource.org/licenses/eclipse-1.0.php
import urllib2
class ArchiverWrapper(object):
def restart_archiver(self):
# Set to ignore proxy for localhost
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
urllib2.urlopen("http://localhost:4813/restart")
Read returned page, just to make sure
|
#This file is part of the ISIS IBEX application.
#Copyright (C) 2012-2016 Science & Technology Facilities Council.
#All rights reserved.
#
#This program is distributed in the hope that it will be useful.
#This program and the accompanying materials are made available under the
#terms of the Eclipse Public License v1.0 which accompanies this distribution.
#EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
#AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
#OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
#You should have received a copy of the Eclipse Public License v1.0
#along with this program; if not, you can obtain a copy from
#https://www.eclipse.org/org/documents/epl-v10.php or
#http://opensource.org/licenses/eclipse-1.0.php
import urllib2
class ArchiverWrapper(object):
def restart_archiver(self):
# Set to ignore proxy for localhost
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
res = urllib2.urlopen("http://localhost:4813/restart")
d = res.read()
|
<commit_before>#This file is part of the ISIS IBEX application.
#Copyright (C) 2012-2016 Science & Technology Facilities Council.
#All rights reserved.
#
#This program is distributed in the hope that it will be useful.
#This program and the accompanying materials are made available under the
#terms of the Eclipse Public License v1.0 which accompanies this distribution.
#EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
#AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
#OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
#You should have received a copy of the Eclipse Public License v1.0
#along with this program; if not, you can obtain a copy from
#https://www.eclipse.org/org/documents/epl-v10.php or
#http://opensource.org/licenses/eclipse-1.0.php
import urllib2
class ArchiverWrapper(object):
def restart_archiver(self):
# Set to ignore proxy for localhost
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
urllib2.urlopen("http://localhost:4813/restart")
<commit_msg>Read returned page, just to make sure<commit_after>
|
#This file is part of the ISIS IBEX application.
#Copyright (C) 2012-2016 Science & Technology Facilities Council.
#All rights reserved.
#
#This program is distributed in the hope that it will be useful.
#This program and the accompanying materials are made available under the
#terms of the Eclipse Public License v1.0 which accompanies this distribution.
#EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
#AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
#OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
#You should have received a copy of the Eclipse Public License v1.0
#along with this program; if not, you can obtain a copy from
#https://www.eclipse.org/org/documents/epl-v10.php or
#http://opensource.org/licenses/eclipse-1.0.php
import urllib2
class ArchiverWrapper(object):
def restart_archiver(self):
# Set to ignore proxy for localhost
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
res = urllib2.urlopen("http://localhost:4813/restart")
d = res.read()
|
#This file is part of the ISIS IBEX application.
#Copyright (C) 2012-2016 Science & Technology Facilities Council.
#All rights reserved.
#
#This program is distributed in the hope that it will be useful.
#This program and the accompanying materials are made available under the
#terms of the Eclipse Public License v1.0 which accompanies this distribution.
#EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
#AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
#OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
#You should have received a copy of the Eclipse Public License v1.0
#along with this program; if not, you can obtain a copy from
#https://www.eclipse.org/org/documents/epl-v10.php or
#http://opensource.org/licenses/eclipse-1.0.php
import urllib2
class ArchiverWrapper(object):
def restart_archiver(self):
# Set to ignore proxy for localhost
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
urllib2.urlopen("http://localhost:4813/restart")
Read returned page, just to make sure#This file is part of the ISIS IBEX application.
#Copyright (C) 2012-2016 Science & Technology Facilities Council.
#All rights reserved.
#
#This program is distributed in the hope that it will be useful.
#This program and the accompanying materials are made available under the
#terms of the Eclipse Public License v1.0 which accompanies this distribution.
#EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
#AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
#OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
#You should have received a copy of the Eclipse Public License v1.0
#along with this program; if not, you can obtain a copy from
#https://www.eclipse.org/org/documents/epl-v10.php or
#http://opensource.org/licenses/eclipse-1.0.php
import urllib2
class ArchiverWrapper(object):
def restart_archiver(self):
# Set to ignore proxy for localhost
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
res = urllib2.urlopen("http://localhost:4813/restart")
d = res.read()
|
<commit_before>#This file is part of the ISIS IBEX application.
#Copyright (C) 2012-2016 Science & Technology Facilities Council.
#All rights reserved.
#
#This program is distributed in the hope that it will be useful.
#This program and the accompanying materials are made available under the
#terms of the Eclipse Public License v1.0 which accompanies this distribution.
#EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
#AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
#OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
#You should have received a copy of the Eclipse Public License v1.0
#along with this program; if not, you can obtain a copy from
#https://www.eclipse.org/org/documents/epl-v10.php or
#http://opensource.org/licenses/eclipse-1.0.php
import urllib2
class ArchiverWrapper(object):
def restart_archiver(self):
# Set to ignore proxy for localhost
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
urllib2.urlopen("http://localhost:4813/restart")
<commit_msg>Read returned page, just to make sure<commit_after>#This file is part of the ISIS IBEX application.
#Copyright (C) 2012-2016 Science & Technology Facilities Council.
#All rights reserved.
#
#This program is distributed in the hope that it will be useful.
#This program and the accompanying materials are made available under the
#terms of the Eclipse Public License v1.0 which accompanies this distribution.
#EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
#AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
#OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
#You should have received a copy of the Eclipse Public License v1.0
#along with this program; if not, you can obtain a copy from
#https://www.eclipse.org/org/documents/epl-v10.php or
#http://opensource.org/licenses/eclipse-1.0.php
import urllib2
class ArchiverWrapper(object):
def restart_archiver(self):
# Set to ignore proxy for localhost
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
urllib2.install_opener(opener)
res = urllib2.urlopen("http://localhost:4813/restart")
d = res.read()
|
14fdcdd5193816cc171120ba31112411aa0fd43d
|
rackattack/physical/coldreclaim.py
|
rackattack/physical/coldreclaim.py
|
import time
import logging
import multiprocessing.pool
from rackattack.physical.ipmi import IPMI
class ColdReclaim:
_CONCURRENCY = 8
_pool = None
def __init__(self, hostname, username, password, hardReset):
self._hostname = hostname
self._username = username
self._password = password
self._hardReset = hardReset
if ColdReclaim._pool is None:
ColdReclaim._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
ColdReclaim._pool.apply_async(self._run)
def _run(self):
ipmi = IPMI(self._hostname, self._username, self._password)
try:
if self._hardReset == "True":
ipmi.powerCycle()
else:
ipmi.softReset()
except:
logging.exception("Unable to reclaim by cold restart '%(hostname)s'",
dict(hostname=self._hostname))
|
import time
import logging
import multiprocessing.pool
from rackattack.physical.ipmi import IPMI
class ColdReclaim:
_CONCURRENCY = 8
_pool = None
def __init__(self, hostname, username, password, hardReset):
self._hostname = hostname
self._username = username
self._password = password
self._hardReset = hardReset
if ColdReclaim._pool is None:
ColdReclaim._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
ColdReclaim._pool.apply_async(self._run)
def _run(self):
ipmi = IPMI(self._hostname, self._username, self._password)
try:
ipmi.powerCycle()
# if self._hardReset == "True":
# ipmi.powerCycle()
# else:
# ipmi.softReset()
except:
logging.exception("Unable to reclaim by cold restart '%(hostname)s'",
dict(hostname=self._hostname))
|
Stop using soft ipmi resets until figuring out why it does not work in a lot of cases
|
Stop using soft ipmi resets until figuring out why it does not work in a lot of cases
|
Python
|
apache-2.0
|
eliran-stratoscale/rackattack-physical,eliran-stratoscale/rackattack-physical,Stratoscale/rackattack-physical,Stratoscale/rackattack-physical
|
import time
import logging
import multiprocessing.pool
from rackattack.physical.ipmi import IPMI
class ColdReclaim:
_CONCURRENCY = 8
_pool = None
def __init__(self, hostname, username, password, hardReset):
self._hostname = hostname
self._username = username
self._password = password
self._hardReset = hardReset
if ColdReclaim._pool is None:
ColdReclaim._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
ColdReclaim._pool.apply_async(self._run)
def _run(self):
ipmi = IPMI(self._hostname, self._username, self._password)
try:
if self._hardReset == "True":
ipmi.powerCycle()
else:
ipmi.softReset()
except:
logging.exception("Unable to reclaim by cold restart '%(hostname)s'",
dict(hostname=self._hostname))
Stop using soft ipmi resets until figuring out why it does not work in a lot of cases
|
import time
import logging
import multiprocessing.pool
from rackattack.physical.ipmi import IPMI
class ColdReclaim:
_CONCURRENCY = 8
_pool = None
def __init__(self, hostname, username, password, hardReset):
self._hostname = hostname
self._username = username
self._password = password
self._hardReset = hardReset
if ColdReclaim._pool is None:
ColdReclaim._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
ColdReclaim._pool.apply_async(self._run)
def _run(self):
ipmi = IPMI(self._hostname, self._username, self._password)
try:
ipmi.powerCycle()
# if self._hardReset == "True":
# ipmi.powerCycle()
# else:
# ipmi.softReset()
except:
logging.exception("Unable to reclaim by cold restart '%(hostname)s'",
dict(hostname=self._hostname))
|
<commit_before>import time
import logging
import multiprocessing.pool
from rackattack.physical.ipmi import IPMI
class ColdReclaim:
_CONCURRENCY = 8
_pool = None
def __init__(self, hostname, username, password, hardReset):
self._hostname = hostname
self._username = username
self._password = password
self._hardReset = hardReset
if ColdReclaim._pool is None:
ColdReclaim._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
ColdReclaim._pool.apply_async(self._run)
def _run(self):
ipmi = IPMI(self._hostname, self._username, self._password)
try:
if self._hardReset == "True":
ipmi.powerCycle()
else:
ipmi.softReset()
except:
logging.exception("Unable to reclaim by cold restart '%(hostname)s'",
dict(hostname=self._hostname))
<commit_msg>Stop using soft ipmi resets until figuring out why it does not work in a lot of cases<commit_after>
|
import time
import logging
import multiprocessing.pool
from rackattack.physical.ipmi import IPMI
class ColdReclaim:
_CONCURRENCY = 8
_pool = None
def __init__(self, hostname, username, password, hardReset):
self._hostname = hostname
self._username = username
self._password = password
self._hardReset = hardReset
if ColdReclaim._pool is None:
ColdReclaim._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
ColdReclaim._pool.apply_async(self._run)
def _run(self):
ipmi = IPMI(self._hostname, self._username, self._password)
try:
ipmi.powerCycle()
# if self._hardReset == "True":
# ipmi.powerCycle()
# else:
# ipmi.softReset()
except:
logging.exception("Unable to reclaim by cold restart '%(hostname)s'",
dict(hostname=self._hostname))
|
import time
import logging
import multiprocessing.pool
from rackattack.physical.ipmi import IPMI
class ColdReclaim:
_CONCURRENCY = 8
_pool = None
def __init__(self, hostname, username, password, hardReset):
self._hostname = hostname
self._username = username
self._password = password
self._hardReset = hardReset
if ColdReclaim._pool is None:
ColdReclaim._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
ColdReclaim._pool.apply_async(self._run)
def _run(self):
ipmi = IPMI(self._hostname, self._username, self._password)
try:
if self._hardReset == "True":
ipmi.powerCycle()
else:
ipmi.softReset()
except:
logging.exception("Unable to reclaim by cold restart '%(hostname)s'",
dict(hostname=self._hostname))
Stop using soft ipmi resets until figuring out why it does not work in a lot of casesimport time
import logging
import multiprocessing.pool
from rackattack.physical.ipmi import IPMI
class ColdReclaim:
_CONCURRENCY = 8
_pool = None
def __init__(self, hostname, username, password, hardReset):
self._hostname = hostname
self._username = username
self._password = password
self._hardReset = hardReset
if ColdReclaim._pool is None:
ColdReclaim._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
ColdReclaim._pool.apply_async(self._run)
def _run(self):
ipmi = IPMI(self._hostname, self._username, self._password)
try:
ipmi.powerCycle()
# if self._hardReset == "True":
# ipmi.powerCycle()
# else:
# ipmi.softReset()
except:
logging.exception("Unable to reclaim by cold restart '%(hostname)s'",
dict(hostname=self._hostname))
|
<commit_before>import time
import logging
import multiprocessing.pool
from rackattack.physical.ipmi import IPMI
class ColdReclaim:
_CONCURRENCY = 8
_pool = None
def __init__(self, hostname, username, password, hardReset):
self._hostname = hostname
self._username = username
self._password = password
self._hardReset = hardReset
if ColdReclaim._pool is None:
ColdReclaim._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
ColdReclaim._pool.apply_async(self._run)
def _run(self):
ipmi = IPMI(self._hostname, self._username, self._password)
try:
if self._hardReset == "True":
ipmi.powerCycle()
else:
ipmi.softReset()
except:
logging.exception("Unable to reclaim by cold restart '%(hostname)s'",
dict(hostname=self._hostname))
<commit_msg>Stop using soft ipmi resets until figuring out why it does not work in a lot of cases<commit_after>import time
import logging
import multiprocessing.pool
from rackattack.physical.ipmi import IPMI
class ColdReclaim:
_CONCURRENCY = 8
_pool = None
def __init__(self, hostname, username, password, hardReset):
self._hostname = hostname
self._username = username
self._password = password
self._hardReset = hardReset
if ColdReclaim._pool is None:
ColdReclaim._pool = multiprocessing.pool.ThreadPool(self._CONCURRENCY)
ColdReclaim._pool.apply_async(self._run)
def _run(self):
ipmi = IPMI(self._hostname, self._username, self._password)
try:
ipmi.powerCycle()
# if self._hardReset == "True":
# ipmi.powerCycle()
# else:
# ipmi.softReset()
except:
logging.exception("Unable to reclaim by cold restart '%(hostname)s'",
dict(hostname=self._hostname))
|
44351d1e48159825226478df13c648aaa83018db
|
reportlab/test/test_tools_pythonpoint.py
|
reportlab/test/test_tools_pythonpoint.py
|
"""Tests for the PythonPoint tool.
"""
import os, sys, string
from reportlab.test import unittest
from reportlab.test.utils import makeSuiteForClasses, outputfile
import reportlab
class PythonPointTestCase(unittest.TestCase):
"Some very crude tests on PythonPoint."
def test0(self):
"Test if pythonpoint.pdf can be created from pythonpoint.xml."
join, dirname, isfile, abspath = os.path.join, os.path.dirname, os.path.isfile, os.path.abspath
rlDir = abspath(dirname(reportlab.__file__))
from reportlab.tools.pythonpoint import pythonpoint
from reportlab.lib.utils import isCompactDistro, open_for_read
ppDir = dirname(pythonpoint.__file__)
xml = join(ppDir, 'demos', 'pythonpoint.xml')
datafilename = 'pythonpoint.pdf'
outdir = outputfile('')
if isCompactDistro():
cwd = None
xml = open_for_read(xml)
else:
outDir = join(rlDir, 'test')
cwd = os.getcwd()
os.chdir(join(ppDir, 'demos'))
pdf = join(outDir, datafilename)
if isfile(pdf): os.remove(pdf)
pythonpoint.process(xml, outDir=outDir, verbose=0, datafilename=datafilename)
if cwd: os.chdir(cwd)
assert os.path.exists(pdf)
os.remove(pdf)
def makeSuite():
return makeSuiteForClasses(PythonPointTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
|
"""Tests for the PythonPoint tool.
"""
import os, sys, string
from reportlab.test import unittest
from reportlab.test.utils import makeSuiteForClasses, outputfile
import reportlab
class PythonPointTestCase(unittest.TestCase):
"Some very crude tests on PythonPoint."
def test0(self):
"Test if pythonpoint.pdf can be created from pythonpoint.xml."
join, dirname, isfile, abspath = os.path.join, os.path.dirname, os.path.isfile, os.path.abspath
rlDir = abspath(dirname(reportlab.__file__))
from reportlab.tools.pythonpoint import pythonpoint
from reportlab.lib.utils import isCompactDistro, open_for_read
ppDir = dirname(pythonpoint.__file__)
xml = join(ppDir, 'demos', 'pythonpoint.xml')
datafilename = 'pythonpoint.pdf'
outDir = outputfile('')
if isCompactDistro():
cwd = None
xml = open_for_read(xml)
else:
cwd = os.getcwd()
os.chdir(join(ppDir, 'demos'))
pdf = join(outDir, datafilename)
if isfile(pdf): os.remove(pdf)
pythonpoint.process(xml, outDir=outDir, verbose=0, datafilename=datafilename)
if cwd: os.chdir(cwd)
assert os.path.exists(pdf)
def makeSuite():
return makeSuiteForClasses(PythonPointTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
|
Fix buglet in compact testing
|
Fix buglet in compact testing
|
Python
|
bsd-3-clause
|
kanarelo/reportlab,kanarelo/reportlab,Distrotech/reportlab,Distrotech/reportlab,kanarelo/reportlab,Distrotech/reportlab,Distrotech/reportlab,Distrotech/reportlab,kanarelo/reportlab,kanarelo/reportlab
|
"""Tests for the PythonPoint tool.
"""
import os, sys, string
from reportlab.test import unittest
from reportlab.test.utils import makeSuiteForClasses, outputfile
import reportlab
class PythonPointTestCase(unittest.TestCase):
"Some very crude tests on PythonPoint."
def test0(self):
"Test if pythonpoint.pdf can be created from pythonpoint.xml."
join, dirname, isfile, abspath = os.path.join, os.path.dirname, os.path.isfile, os.path.abspath
rlDir = abspath(dirname(reportlab.__file__))
from reportlab.tools.pythonpoint import pythonpoint
from reportlab.lib.utils import isCompactDistro, open_for_read
ppDir = dirname(pythonpoint.__file__)
xml = join(ppDir, 'demos', 'pythonpoint.xml')
datafilename = 'pythonpoint.pdf'
outdir = outputfile('')
if isCompactDistro():
cwd = None
xml = open_for_read(xml)
else:
outDir = join(rlDir, 'test')
cwd = os.getcwd()
os.chdir(join(ppDir, 'demos'))
pdf = join(outDir, datafilename)
if isfile(pdf): os.remove(pdf)
pythonpoint.process(xml, outDir=outDir, verbose=0, datafilename=datafilename)
if cwd: os.chdir(cwd)
assert os.path.exists(pdf)
os.remove(pdf)
def makeSuite():
return makeSuiteForClasses(PythonPointTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
Fix buglet in compact testing
|
"""Tests for the PythonPoint tool.
"""
import os, sys, string
from reportlab.test import unittest
from reportlab.test.utils import makeSuiteForClasses, outputfile
import reportlab
class PythonPointTestCase(unittest.TestCase):
"Some very crude tests on PythonPoint."
def test0(self):
"Test if pythonpoint.pdf can be created from pythonpoint.xml."
join, dirname, isfile, abspath = os.path.join, os.path.dirname, os.path.isfile, os.path.abspath
rlDir = abspath(dirname(reportlab.__file__))
from reportlab.tools.pythonpoint import pythonpoint
from reportlab.lib.utils import isCompactDistro, open_for_read
ppDir = dirname(pythonpoint.__file__)
xml = join(ppDir, 'demos', 'pythonpoint.xml')
datafilename = 'pythonpoint.pdf'
outDir = outputfile('')
if isCompactDistro():
cwd = None
xml = open_for_read(xml)
else:
cwd = os.getcwd()
os.chdir(join(ppDir, 'demos'))
pdf = join(outDir, datafilename)
if isfile(pdf): os.remove(pdf)
pythonpoint.process(xml, outDir=outDir, verbose=0, datafilename=datafilename)
if cwd: os.chdir(cwd)
assert os.path.exists(pdf)
def makeSuite():
return makeSuiteForClasses(PythonPointTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
|
<commit_before>"""Tests for the PythonPoint tool.
"""
import os, sys, string
from reportlab.test import unittest
from reportlab.test.utils import makeSuiteForClasses, outputfile
import reportlab
class PythonPointTestCase(unittest.TestCase):
"Some very crude tests on PythonPoint."
def test0(self):
"Test if pythonpoint.pdf can be created from pythonpoint.xml."
join, dirname, isfile, abspath = os.path.join, os.path.dirname, os.path.isfile, os.path.abspath
rlDir = abspath(dirname(reportlab.__file__))
from reportlab.tools.pythonpoint import pythonpoint
from reportlab.lib.utils import isCompactDistro, open_for_read
ppDir = dirname(pythonpoint.__file__)
xml = join(ppDir, 'demos', 'pythonpoint.xml')
datafilename = 'pythonpoint.pdf'
outdir = outputfile('')
if isCompactDistro():
cwd = None
xml = open_for_read(xml)
else:
outDir = join(rlDir, 'test')
cwd = os.getcwd()
os.chdir(join(ppDir, 'demos'))
pdf = join(outDir, datafilename)
if isfile(pdf): os.remove(pdf)
pythonpoint.process(xml, outDir=outDir, verbose=0, datafilename=datafilename)
if cwd: os.chdir(cwd)
assert os.path.exists(pdf)
os.remove(pdf)
def makeSuite():
return makeSuiteForClasses(PythonPointTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
<commit_msg>Fix buglet in compact testing<commit_after>
|
"""Tests for the PythonPoint tool.
"""
import os, sys, string
from reportlab.test import unittest
from reportlab.test.utils import makeSuiteForClasses, outputfile
import reportlab
class PythonPointTestCase(unittest.TestCase):
"Some very crude tests on PythonPoint."
def test0(self):
"Test if pythonpoint.pdf can be created from pythonpoint.xml."
join, dirname, isfile, abspath = os.path.join, os.path.dirname, os.path.isfile, os.path.abspath
rlDir = abspath(dirname(reportlab.__file__))
from reportlab.tools.pythonpoint import pythonpoint
from reportlab.lib.utils import isCompactDistro, open_for_read
ppDir = dirname(pythonpoint.__file__)
xml = join(ppDir, 'demos', 'pythonpoint.xml')
datafilename = 'pythonpoint.pdf'
outDir = outputfile('')
if isCompactDistro():
cwd = None
xml = open_for_read(xml)
else:
cwd = os.getcwd()
os.chdir(join(ppDir, 'demos'))
pdf = join(outDir, datafilename)
if isfile(pdf): os.remove(pdf)
pythonpoint.process(xml, outDir=outDir, verbose=0, datafilename=datafilename)
if cwd: os.chdir(cwd)
assert os.path.exists(pdf)
def makeSuite():
return makeSuiteForClasses(PythonPointTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
|
"""Tests for the PythonPoint tool.
"""
import os, sys, string
from reportlab.test import unittest
from reportlab.test.utils import makeSuiteForClasses, outputfile
import reportlab
class PythonPointTestCase(unittest.TestCase):
"Some very crude tests on PythonPoint."
def test0(self):
"Test if pythonpoint.pdf can be created from pythonpoint.xml."
join, dirname, isfile, abspath = os.path.join, os.path.dirname, os.path.isfile, os.path.abspath
rlDir = abspath(dirname(reportlab.__file__))
from reportlab.tools.pythonpoint import pythonpoint
from reportlab.lib.utils import isCompactDistro, open_for_read
ppDir = dirname(pythonpoint.__file__)
xml = join(ppDir, 'demos', 'pythonpoint.xml')
datafilename = 'pythonpoint.pdf'
outdir = outputfile('')
if isCompactDistro():
cwd = None
xml = open_for_read(xml)
else:
outDir = join(rlDir, 'test')
cwd = os.getcwd()
os.chdir(join(ppDir, 'demos'))
pdf = join(outDir, datafilename)
if isfile(pdf): os.remove(pdf)
pythonpoint.process(xml, outDir=outDir, verbose=0, datafilename=datafilename)
if cwd: os.chdir(cwd)
assert os.path.exists(pdf)
os.remove(pdf)
def makeSuite():
return makeSuiteForClasses(PythonPointTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
Fix buglet in compact testing"""Tests for the PythonPoint tool.
"""
import os, sys, string
from reportlab.test import unittest
from reportlab.test.utils import makeSuiteForClasses, outputfile
import reportlab
class PythonPointTestCase(unittest.TestCase):
"Some very crude tests on PythonPoint."
def test0(self):
"Test if pythonpoint.pdf can be created from pythonpoint.xml."
join, dirname, isfile, abspath = os.path.join, os.path.dirname, os.path.isfile, os.path.abspath
rlDir = abspath(dirname(reportlab.__file__))
from reportlab.tools.pythonpoint import pythonpoint
from reportlab.lib.utils import isCompactDistro, open_for_read
ppDir = dirname(pythonpoint.__file__)
xml = join(ppDir, 'demos', 'pythonpoint.xml')
datafilename = 'pythonpoint.pdf'
outDir = outputfile('')
if isCompactDistro():
cwd = None
xml = open_for_read(xml)
else:
cwd = os.getcwd()
os.chdir(join(ppDir, 'demos'))
pdf = join(outDir, datafilename)
if isfile(pdf): os.remove(pdf)
pythonpoint.process(xml, outDir=outDir, verbose=0, datafilename=datafilename)
if cwd: os.chdir(cwd)
assert os.path.exists(pdf)
def makeSuite():
return makeSuiteForClasses(PythonPointTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
|
<commit_before>"""Tests for the PythonPoint tool.
"""
import os, sys, string
from reportlab.test import unittest
from reportlab.test.utils import makeSuiteForClasses, outputfile
import reportlab
class PythonPointTestCase(unittest.TestCase):
"Some very crude tests on PythonPoint."
def test0(self):
"Test if pythonpoint.pdf can be created from pythonpoint.xml."
join, dirname, isfile, abspath = os.path.join, os.path.dirname, os.path.isfile, os.path.abspath
rlDir = abspath(dirname(reportlab.__file__))
from reportlab.tools.pythonpoint import pythonpoint
from reportlab.lib.utils import isCompactDistro, open_for_read
ppDir = dirname(pythonpoint.__file__)
xml = join(ppDir, 'demos', 'pythonpoint.xml')
datafilename = 'pythonpoint.pdf'
outdir = outputfile('')
if isCompactDistro():
cwd = None
xml = open_for_read(xml)
else:
outDir = join(rlDir, 'test')
cwd = os.getcwd()
os.chdir(join(ppDir, 'demos'))
pdf = join(outDir, datafilename)
if isfile(pdf): os.remove(pdf)
pythonpoint.process(xml, outDir=outDir, verbose=0, datafilename=datafilename)
if cwd: os.chdir(cwd)
assert os.path.exists(pdf)
os.remove(pdf)
def makeSuite():
return makeSuiteForClasses(PythonPointTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
<commit_msg>Fix buglet in compact testing<commit_after>"""Tests for the PythonPoint tool.
"""
import os, sys, string
from reportlab.test import unittest
from reportlab.test.utils import makeSuiteForClasses, outputfile
import reportlab
class PythonPointTestCase(unittest.TestCase):
"Some very crude tests on PythonPoint."
def test0(self):
"Test if pythonpoint.pdf can be created from pythonpoint.xml."
join, dirname, isfile, abspath = os.path.join, os.path.dirname, os.path.isfile, os.path.abspath
rlDir = abspath(dirname(reportlab.__file__))
from reportlab.tools.pythonpoint import pythonpoint
from reportlab.lib.utils import isCompactDistro, open_for_read
ppDir = dirname(pythonpoint.__file__)
xml = join(ppDir, 'demos', 'pythonpoint.xml')
datafilename = 'pythonpoint.pdf'
outDir = outputfile('')
if isCompactDistro():
cwd = None
xml = open_for_read(xml)
else:
cwd = os.getcwd()
os.chdir(join(ppDir, 'demos'))
pdf = join(outDir, datafilename)
if isfile(pdf): os.remove(pdf)
pythonpoint.process(xml, outDir=outDir, verbose=0, datafilename=datafilename)
if cwd: os.chdir(cwd)
assert os.path.exists(pdf)
def makeSuite():
return makeSuiteForClasses(PythonPointTestCase)
#noruntests
if __name__ == "__main__":
unittest.TextTestRunner().run(makeSuite())
|
77c97ea46280b395d0c2c1c02941f5eb6d88fde6
|
rest_framework_json_api/mixins.py
|
rest_framework_json_api/mixins.py
|
"""
Class Mixins.
"""
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
ids = dict(self.request.QUERY_PARAMS).get('ids[]')
if ids:
self.queryset = self.queryset.filter(id__in=ids)
return self.queryset
|
"""
Class Mixins.
"""
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
ids = dict(getattr(self.request, 'query_params', self.request.QUERY_PARAMS)).get('ids[]')
if ids:
self.queryset = self.queryset.filter(id__in=ids)
return self.queryset
|
Fix for deprecation of `request.QUERY_PARAMS` in DRF 3.2
|
Fix for deprecation of `request.QUERY_PARAMS` in DRF 3.2
|
Python
|
bsd-2-clause
|
Instawork/django-rest-framework-json-api,aquavitae/django-rest-framework-json-api,lukaslundgren/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,leo-naeka/rest_framework_ember,kaldras/django-rest-framework-json-api,schtibe/django-rest-framework-json-api,hnakamur/django-rest-framework-json-api,leifurhauks/django-rest-framework-json-api,django-json-api/django-rest-framework-json-api,leo-naeka/django-rest-framework-json-api,grapo/django-rest-framework-json-api,django-json-api/rest_framework_ember,pombredanne/django-rest-framework-json-api,abdulhaq-e/django-rest-framework-json-api,martinmaillard/django-rest-framework-json-api,scottfisk/django-rest-framework-json-api
|
"""
Class Mixins.
"""
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
ids = dict(self.request.QUERY_PARAMS).get('ids[]')
if ids:
self.queryset = self.queryset.filter(id__in=ids)
return self.queryset
Fix for deprecation of `request.QUERY_PARAMS` in DRF 3.2
|
"""
Class Mixins.
"""
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
ids = dict(getattr(self.request, 'query_params', self.request.QUERY_PARAMS)).get('ids[]')
if ids:
self.queryset = self.queryset.filter(id__in=ids)
return self.queryset
|
<commit_before>"""
Class Mixins.
"""
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
ids = dict(self.request.QUERY_PARAMS).get('ids[]')
if ids:
self.queryset = self.queryset.filter(id__in=ids)
return self.queryset
<commit_msg>Fix for deprecation of `request.QUERY_PARAMS` in DRF 3.2<commit_after>
|
"""
Class Mixins.
"""
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
ids = dict(getattr(self.request, 'query_params', self.request.QUERY_PARAMS)).get('ids[]')
if ids:
self.queryset = self.queryset.filter(id__in=ids)
return self.queryset
|
"""
Class Mixins.
"""
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
ids = dict(self.request.QUERY_PARAMS).get('ids[]')
if ids:
self.queryset = self.queryset.filter(id__in=ids)
return self.queryset
Fix for deprecation of `request.QUERY_PARAMS` in DRF 3.2"""
Class Mixins.
"""
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
ids = dict(getattr(self.request, 'query_params', self.request.QUERY_PARAMS)).get('ids[]')
if ids:
self.queryset = self.queryset.filter(id__in=ids)
return self.queryset
|
<commit_before>"""
Class Mixins.
"""
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
ids = dict(self.request.QUERY_PARAMS).get('ids[]')
if ids:
self.queryset = self.queryset.filter(id__in=ids)
return self.queryset
<commit_msg>Fix for deprecation of `request.QUERY_PARAMS` in DRF 3.2`<commit_after>"""
Class Mixins.
"""
class MultipleIDMixin(object):
"""
Override get_queryset for multiple id support
"""
def get_queryset(self):
"""
Override :meth:``get_queryset``
"""
ids = dict(getattr(self.request, 'query_params', self.request.QUERY_PARAMS)).get('ids[]')
if ids:
self.queryset = self.queryset.filter(id__in=ids)
return self.queryset
|
cd3c1f1fbacd4d1a113249af0faf298d5afa540f
|
wikifork-convert.py
|
wikifork-convert.py
|
#!/usr/bin/env python3
from geojson import Feature, Point, FeatureCollection, dumps
wikitext = open('wiki-fork', 'r')
output = open('output.geojson', 'w')
geo_output = []
for line in wikitext:
split = line.split('"')
coord = split[0].strip(' ')
coord = coord.split(',')
name = split[1].strip()
point = Point((float(coord[1]), float(coord[0])))
feature = Feature(geometry=point, properties={"Name": name})
geo_output.append(feature)
output.write(dumps(FeatureCollection(geo_output)))
wikitext.close()
output.close()
|
#!/usr/bin/env python3
import urllib.request
from geojson import Feature, Point, FeatureCollection, dumps
wiki = urllib.request.urlopen("https://wiki.archlinux.org/index.php/ArchMap/List")
wiki_source = wiki.read()
wikitext_start = wiki_source.find(b'<pre>', wiki_source.find(b'<pre>') + 1) + 5
wikitext_end = wiki_source.find(b'</pre>', wiki_source.find(b'</pre>') + 1)
wikitext = wiki_source[wikitext_start:wikitext_end]
output = open('output.geojson', 'w')
geo_output = []
for line in wikitext:
split = line.split('"')
coord = split[0].strip(' ')
coord = coord.split(',')
name = split[1].strip()
point = Point((float(coord[1]), float(coord[0])))
feature = Feature(geometry=point, properties={"Name": name})
geo_output.append(feature)
output.write(dumps(FeatureCollection(geo_output)))
output.close()
|
Switch to parsing the wiki - UNTESTED
|
Switch to parsing the wiki - UNTESTED
|
Python
|
unlicense
|
guyfawcus/ArchMap,maelstrom59/ArchMap,guyfawcus/ArchMap,guyfawcus/ArchMap,maelstrom59/ArchMap
|
#!/usr/bin/env python3
from geojson import Feature, Point, FeatureCollection, dumps
wikitext = open('wiki-fork', 'r')
output = open('output.geojson', 'w')
geo_output = []
for line in wikitext:
split = line.split('"')
coord = split[0].strip(' ')
coord = coord.split(',')
name = split[1].strip()
point = Point((float(coord[1]), float(coord[0])))
feature = Feature(geometry=point, properties={"Name": name})
geo_output.append(feature)
output.write(dumps(FeatureCollection(geo_output)))
wikitext.close()
output.close()
Switch to parsing the wiki - UNTESTED
|
#!/usr/bin/env python3
import urllib.request
from geojson import Feature, Point, FeatureCollection, dumps
wiki = urllib.request.urlopen("https://wiki.archlinux.org/index.php/ArchMap/List")
wiki_source = wiki.read()
wikitext_start = wiki_source.find(b'<pre>', wiki_source.find(b'<pre>') + 1) + 5
wikitext_end = wiki_source.find(b'</pre>', wiki_source.find(b'</pre>') + 1)
wikitext = wiki_source[wikitext_start:wikitext_end]
output = open('output.geojson', 'w')
geo_output = []
for line in wikitext:
split = line.split('"')
coord = split[0].strip(' ')
coord = coord.split(',')
name = split[1].strip()
point = Point((float(coord[1]), float(coord[0])))
feature = Feature(geometry=point, properties={"Name": name})
geo_output.append(feature)
output.write(dumps(FeatureCollection(geo_output)))
output.close()
|
<commit_before>#!/usr/bin/env python3
from geojson import Feature, Point, FeatureCollection, dumps
wikitext = open('wiki-fork', 'r')
output = open('output.geojson', 'w')
geo_output = []
for line in wikitext:
split = line.split('"')
coord = split[0].strip(' ')
coord = coord.split(',')
name = split[1].strip()
point = Point((float(coord[1]), float(coord[0])))
feature = Feature(geometry=point, properties={"Name": name})
geo_output.append(feature)
output.write(dumps(FeatureCollection(geo_output)))
wikitext.close()
output.close()
<commit_msg>Switch to parsing the wiki - UNTESTED<commit_after>
|
#!/usr/bin/env python3
import urllib.request
from geojson import Feature, Point, FeatureCollection, dumps
wiki = urllib.request.urlopen("https://wiki.archlinux.org/index.php/ArchMap/List")
wiki_source = wiki.read()
wikitext_start = wiki_source.find(b'<pre>', wiki_source.find(b'<pre>') + 1) + 5
wikitext_end = wiki_source.find(b'</pre>', wiki_source.find(b'</pre>') + 1)
wikitext = wiki_source[wikitext_start:wikitext_end]
output = open('output.geojson', 'w')
geo_output = []
for line in wikitext:
split = line.split('"')
coord = split[0].strip(' ')
coord = coord.split(',')
name = split[1].strip()
point = Point((float(coord[1]), float(coord[0])))
feature = Feature(geometry=point, properties={"Name": name})
geo_output.append(feature)
output.write(dumps(FeatureCollection(geo_output)))
output.close()
|
#!/usr/bin/env python3
from geojson import Feature, Point, FeatureCollection, dumps
wikitext = open('wiki-fork', 'r')
output = open('output.geojson', 'w')
geo_output = []
for line in wikitext:
split = line.split('"')
coord = split[0].strip(' ')
coord = coord.split(',')
name = split[1].strip()
point = Point((float(coord[1]), float(coord[0])))
feature = Feature(geometry=point, properties={"Name": name})
geo_output.append(feature)
output.write(dumps(FeatureCollection(geo_output)))
wikitext.close()
output.close()
Switch to parsing the wiki - UNTESTED#!/usr/bin/env python3
import urllib.request
from geojson import Feature, Point, FeatureCollection, dumps
wiki = urllib.request.urlopen("https://wiki.archlinux.org/index.php/ArchMap/List")
wiki_source = wiki.read()
wikitext_start = wiki_source.find(b'<pre>', wiki_source.find(b'<pre>') + 1) + 5
wikitext_end = wiki_source.find(b'</pre>', wiki_source.find(b'</pre>') + 1)
wikitext = wiki_source[wikitext_start:wikitext_end]
output = open('output.geojson', 'w')
geo_output = []
for line in wikitext:
split = line.split('"')
coord = split[0].strip(' ')
coord = coord.split(',')
name = split[1].strip()
point = Point((float(coord[1]), float(coord[0])))
feature = Feature(geometry=point, properties={"Name": name})
geo_output.append(feature)
output.write(dumps(FeatureCollection(geo_output)))
output.close()
|
<commit_before>#!/usr/bin/env python3
from geojson import Feature, Point, FeatureCollection, dumps
wikitext = open('wiki-fork', 'r')
output = open('output.geojson', 'w')
geo_output = []
for line in wikitext:
split = line.split('"')
coord = split[0].strip(' ')
coord = coord.split(',')
name = split[1].strip()
point = Point((float(coord[1]), float(coord[0])))
feature = Feature(geometry=point, properties={"Name": name})
geo_output.append(feature)
output.write(dumps(FeatureCollection(geo_output)))
wikitext.close()
output.close()
<commit_msg>Switch to parsing the wiki - UNTESTED<commit_after>#!/usr/bin/env python3
import urllib.request
from geojson import Feature, Point, FeatureCollection, dumps
wiki = urllib.request.urlopen("https://wiki.archlinux.org/index.php/ArchMap/List")
wiki_source = wiki.read()
wikitext_start = wiki_source.find(b'<pre>', wiki_source.find(b'<pre>') + 1) + 5
wikitext_end = wiki_source.find(b'</pre>', wiki_source.find(b'</pre>') + 1)
wikitext = wiki_source[wikitext_start:wikitext_end]
output = open('output.geojson', 'w')
geo_output = []
for line in wikitext:
split = line.split('"')
coord = split[0].strip(' ')
coord = coord.split(',')
name = split[1].strip()
point = Point((float(coord[1]), float(coord[0])))
feature = Feature(geometry=point, properties={"Name": name})
geo_output.append(feature)
output.write(dumps(FeatureCollection(geo_output)))
output.close()
|