commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
17627ac4677f49e805f14acb4ba768b74d43298a
|
py3-test/tests.py
|
py3-test/tests.py
|
# -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, get_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = get_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout))
|
# -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, new_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
|
Use fresh event loop for asyncio test
|
Use fresh event loop for asyncio test
|
Python
|
mit
|
jfhbrook/pyee
|
# -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, get_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = get_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout))
Use fresh event loop for asyncio test
|
# -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, new_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
|
<commit_before># -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, get_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = get_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout))
<commit_msg>Use fresh event loop for asyncio test<commit_after>
|
# -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, new_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
|
# -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, get_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = get_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout))
Use fresh event loop for asyncio test# -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, new_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
|
<commit_before># -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, get_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = get_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout))
<commit_msg>Use fresh event loop for asyncio test<commit_after># -*- coding: utf-8 -*-
import nose.tools as nt
from asyncio import Future, gather, new_event_loop, sleep
from pyee import EventEmitter
def test_async_emit():
"""Test that event_emitters can handle wrapping coroutines
"""
loop = new_event_loop()
ee = EventEmitter(loop=loop)
should_call = Future(loop=loop)
@ee.on('event')
async def event_handler():
should_call.set_result(True)
async def create_timeout(loop=loop):
await sleep(1, loop=loop)
if not should_call.done():
raise Exception('should_call timed out!')
return should_call.cancel()
timeout = create_timeout(loop=loop)
@should_call.add_done_callback
def _done(result):
nt.assert_true(result)
ee.emit('event')
loop.run_until_complete(gather(should_call, timeout, loop=loop))
|
9e8e09e0f6514ae0ded77c84e5865a220c9bb0dd
|
engines/empy_engine.py
|
engines/empy_engine.py
|
#!/usr/bin/env python
"""Provide the empy templating engine."""
from __future__ import print_function
import os.path
import em
from . import Engine
class SubsystemWrapper(em.Subsystem):
"""Wrap EmPy's Subsystem class.
Allows to open files relative to a base directory.
"""
def __init__(self, basedir=None, **kwargs):
"""Initialize Subsystem plus a possible base directory."""
super(SubsystemWrapper, self).__init__(**kwargs)
self.basedir = basedir
def open(self, name, *args, **kwargs):
"""Open file, possibly relative to a base directory."""
if self.basedir is not None:
name = os.path.join(self.basedir, name)
return super(SubsystemWrapper, self).open(name, *args, **kwargs)
class EmpyEngine(Engine):
"""Empy templating engine."""
handle = 'empy'
def __init__(self, template, dirname=None, **kwargs):
"""Initialize empy template."""
super(EmpyEngine, self).__init__(**kwargs)
if dirname is not None:
# FIXME: This is a really bad idea, as it works like a global.
# Blame EmPy.
em.theSubsystem = SubsystemWrapper(basedir=dirname)
self.template = template
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return em.expand(self.template, mapping)
|
#!/usr/bin/env python
"""Provide the empy templating engine."""
from __future__ import print_function
import os.path
import em
from . import Engine
class SubsystemWrapper(em.Subsystem):
"""Wrap EmPy's Subsystem class.
Allows to open files relative to a base directory.
"""
def __init__(self, basedir=None, **kwargs):
"""Initialize Subsystem plus a possible base directory."""
em.Subsystem.__init__(self, **kwargs)
self.basedir = basedir
def open(self, name, *args, **kwargs):
"""Open file, possibly relative to a base directory."""
if self.basedir is not None:
name = os.path.join(self.basedir, name)
return em.Subsystem.open(self, name, *args, **kwargs)
class EmpyEngine(Engine):
"""Empy templating engine."""
handle = 'empy'
def __init__(self, template, dirname=None, **kwargs):
"""Initialize empy template."""
super(EmpyEngine, self).__init__(**kwargs)
if dirname is not None:
# FIXME: This is a really bad idea, as it works like a global.
# Blame EmPy.
em.theSubsystem = SubsystemWrapper(basedir=dirname)
self.template = template
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return em.expand(self.template, mapping)
|
Change empy Subsystem wrapper to old style class.
|
Change empy Subsystem wrapper to old style class.
|
Python
|
mit
|
blubberdiblub/eztemplate
|
#!/usr/bin/env python
"""Provide the empy templating engine."""
from __future__ import print_function
import os.path
import em
from . import Engine
class SubsystemWrapper(em.Subsystem):
"""Wrap EmPy's Subsystem class.
Allows to open files relative to a base directory.
"""
def __init__(self, basedir=None, **kwargs):
"""Initialize Subsystem plus a possible base directory."""
super(SubsystemWrapper, self).__init__(**kwargs)
self.basedir = basedir
def open(self, name, *args, **kwargs):
"""Open file, possibly relative to a base directory."""
if self.basedir is not None:
name = os.path.join(self.basedir, name)
return super(SubsystemWrapper, self).open(name, *args, **kwargs)
class EmpyEngine(Engine):
"""Empy templating engine."""
handle = 'empy'
def __init__(self, template, dirname=None, **kwargs):
"""Initialize empy template."""
super(EmpyEngine, self).__init__(**kwargs)
if dirname is not None:
# FIXME: This is a really bad idea, as it works like a global.
# Blame EmPy.
em.theSubsystem = SubsystemWrapper(basedir=dirname)
self.template = template
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return em.expand(self.template, mapping)
Change empy Subsystem wrapper to old style class.
|
#!/usr/bin/env python
"""Provide the empy templating engine."""
from __future__ import print_function
import os.path
import em
from . import Engine
class SubsystemWrapper(em.Subsystem):
"""Wrap EmPy's Subsystem class.
Allows to open files relative to a base directory.
"""
def __init__(self, basedir=None, **kwargs):
"""Initialize Subsystem plus a possible base directory."""
em.Subsystem.__init__(self, **kwargs)
self.basedir = basedir
def open(self, name, *args, **kwargs):
"""Open file, possibly relative to a base directory."""
if self.basedir is not None:
name = os.path.join(self.basedir, name)
return em.Subsystem.open(self, name, *args, **kwargs)
class EmpyEngine(Engine):
"""Empy templating engine."""
handle = 'empy'
def __init__(self, template, dirname=None, **kwargs):
"""Initialize empy template."""
super(EmpyEngine, self).__init__(**kwargs)
if dirname is not None:
# FIXME: This is a really bad idea, as it works like a global.
# Blame EmPy.
em.theSubsystem = SubsystemWrapper(basedir=dirname)
self.template = template
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return em.expand(self.template, mapping)
|
<commit_before>#!/usr/bin/env python
"""Provide the empy templating engine."""
from __future__ import print_function
import os.path
import em
from . import Engine
class SubsystemWrapper(em.Subsystem):
"""Wrap EmPy's Subsystem class.
Allows to open files relative to a base directory.
"""
def __init__(self, basedir=None, **kwargs):
"""Initialize Subsystem plus a possible base directory."""
super(SubsystemWrapper, self).__init__(**kwargs)
self.basedir = basedir
def open(self, name, *args, **kwargs):
"""Open file, possibly relative to a base directory."""
if self.basedir is not None:
name = os.path.join(self.basedir, name)
return super(SubsystemWrapper, self).open(name, *args, **kwargs)
class EmpyEngine(Engine):
"""Empy templating engine."""
handle = 'empy'
def __init__(self, template, dirname=None, **kwargs):
"""Initialize empy template."""
super(EmpyEngine, self).__init__(**kwargs)
if dirname is not None:
# FIXME: This is a really bad idea, as it works like a global.
# Blame EmPy.
em.theSubsystem = SubsystemWrapper(basedir=dirname)
self.template = template
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return em.expand(self.template, mapping)
<commit_msg>Change empy Subsystem wrapper to old style class.<commit_after>
|
#!/usr/bin/env python
"""Provide the empy templating engine."""
from __future__ import print_function
import os.path
import em
from . import Engine
class SubsystemWrapper(em.Subsystem):
"""Wrap EmPy's Subsystem class.
Allows to open files relative to a base directory.
"""
def __init__(self, basedir=None, **kwargs):
"""Initialize Subsystem plus a possible base directory."""
em.Subsystem.__init__(self, **kwargs)
self.basedir = basedir
def open(self, name, *args, **kwargs):
"""Open file, possibly relative to a base directory."""
if self.basedir is not None:
name = os.path.join(self.basedir, name)
return em.Subsystem.open(self, name, *args, **kwargs)
class EmpyEngine(Engine):
"""Empy templating engine."""
handle = 'empy'
def __init__(self, template, dirname=None, **kwargs):
"""Initialize empy template."""
super(EmpyEngine, self).__init__(**kwargs)
if dirname is not None:
# FIXME: This is a really bad idea, as it works like a global.
# Blame EmPy.
em.theSubsystem = SubsystemWrapper(basedir=dirname)
self.template = template
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return em.expand(self.template, mapping)
|
#!/usr/bin/env python
"""Provide the empy templating engine."""
from __future__ import print_function
import os.path
import em
from . import Engine
class SubsystemWrapper(em.Subsystem):
"""Wrap EmPy's Subsystem class.
Allows to open files relative to a base directory.
"""
def __init__(self, basedir=None, **kwargs):
"""Initialize Subsystem plus a possible base directory."""
super(SubsystemWrapper, self).__init__(**kwargs)
self.basedir = basedir
def open(self, name, *args, **kwargs):
"""Open file, possibly relative to a base directory."""
if self.basedir is not None:
name = os.path.join(self.basedir, name)
return super(SubsystemWrapper, self).open(name, *args, **kwargs)
class EmpyEngine(Engine):
"""Empy templating engine."""
handle = 'empy'
def __init__(self, template, dirname=None, **kwargs):
"""Initialize empy template."""
super(EmpyEngine, self).__init__(**kwargs)
if dirname is not None:
# FIXME: This is a really bad idea, as it works like a global.
# Blame EmPy.
em.theSubsystem = SubsystemWrapper(basedir=dirname)
self.template = template
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return em.expand(self.template, mapping)
Change empy Subsystem wrapper to old style class.#!/usr/bin/env python
"""Provide the empy templating engine."""
from __future__ import print_function
import os.path
import em
from . import Engine
class SubsystemWrapper(em.Subsystem):
"""Wrap EmPy's Subsystem class.
Allows to open files relative to a base directory.
"""
def __init__(self, basedir=None, **kwargs):
"""Initialize Subsystem plus a possible base directory."""
em.Subsystem.__init__(self, **kwargs)
self.basedir = basedir
def open(self, name, *args, **kwargs):
"""Open file, possibly relative to a base directory."""
if self.basedir is not None:
name = os.path.join(self.basedir, name)
return em.Subsystem.open(self, name, *args, **kwargs)
class EmpyEngine(Engine):
"""Empy templating engine."""
handle = 'empy'
def __init__(self, template, dirname=None, **kwargs):
"""Initialize empy template."""
super(EmpyEngine, self).__init__(**kwargs)
if dirname is not None:
# FIXME: This is a really bad idea, as it works like a global.
# Blame EmPy.
em.theSubsystem = SubsystemWrapper(basedir=dirname)
self.template = template
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return em.expand(self.template, mapping)
|
<commit_before>#!/usr/bin/env python
"""Provide the empy templating engine."""
from __future__ import print_function
import os.path
import em
from . import Engine
class SubsystemWrapper(em.Subsystem):
"""Wrap EmPy's Subsystem class.
Allows to open files relative to a base directory.
"""
def __init__(self, basedir=None, **kwargs):
"""Initialize Subsystem plus a possible base directory."""
super(SubsystemWrapper, self).__init__(**kwargs)
self.basedir = basedir
def open(self, name, *args, **kwargs):
"""Open file, possibly relative to a base directory."""
if self.basedir is not None:
name = os.path.join(self.basedir, name)
return super(SubsystemWrapper, self).open(name, *args, **kwargs)
class EmpyEngine(Engine):
"""Empy templating engine."""
handle = 'empy'
def __init__(self, template, dirname=None, **kwargs):
"""Initialize empy template."""
super(EmpyEngine, self).__init__(**kwargs)
if dirname is not None:
# FIXME: This is a really bad idea, as it works like a global.
# Blame EmPy.
em.theSubsystem = SubsystemWrapper(basedir=dirname)
self.template = template
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return em.expand(self.template, mapping)
<commit_msg>Change empy Subsystem wrapper to old style class.<commit_after>#!/usr/bin/env python
"""Provide the empy templating engine."""
from __future__ import print_function
import os.path
import em
from . import Engine
class SubsystemWrapper(em.Subsystem):
"""Wrap EmPy's Subsystem class.
Allows to open files relative to a base directory.
"""
def __init__(self, basedir=None, **kwargs):
"""Initialize Subsystem plus a possible base directory."""
em.Subsystem.__init__(self, **kwargs)
self.basedir = basedir
def open(self, name, *args, **kwargs):
"""Open file, possibly relative to a base directory."""
if self.basedir is not None:
name = os.path.join(self.basedir, name)
return em.Subsystem.open(self, name, *args, **kwargs)
class EmpyEngine(Engine):
"""Empy templating engine."""
handle = 'empy'
def __init__(self, template, dirname=None, **kwargs):
"""Initialize empy template."""
super(EmpyEngine, self).__init__(**kwargs)
if dirname is not None:
# FIXME: This is a really bad idea, as it works like a global.
# Blame EmPy.
em.theSubsystem = SubsystemWrapper(basedir=dirname)
self.template = template
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return em.expand(self.template, mapping)
|
9ae919b1d81ca6e640dd96e6ef7aeaeba2fc2679
|
schedule/migrations/0011_event_calendar_not_null.py
|
schedule/migrations/0011_event_calendar_not_null.py
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
|
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
|
Sort imports per isort; fixes failure
|
Sort imports per isort; fixes failure
|
Python
|
bsd-3-clause
|
llazzaro/django-scheduler,llazzaro/django-scheduler,llazzaro/django-scheduler
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
Sort imports per isort; fixes failure
|
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
|
<commit_before>from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
<commit_msg>Sort imports per isort; fixes failure<commit_after>
|
import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
|
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
Sort imports per isort; fixes failureimport django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
|
<commit_before>from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
<commit_msg>Sort imports per isort; fixes failure<commit_after>import django.db.models.deletion
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('schedule', '0010_events_set_missing_calendar'),
]
operations = [
migrations.AlterField(
model_name='event',
name='calendar',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='schedule.Calendar', verbose_name='calendar'),
),
]
|
5a5ba8bbd484f427260f699101e5e754e4a6c5d1
|
phy/utils/tests/test_color.py
|
phy/utils/tests/test_color.py
|
# -*- coding: utf-8 -*-
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import _random_color, _is_bright, _random_bright_color
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
assert _is_bright(_random_bright_color())
|
# -*- coding: utf-8 -*-
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import (_random_color, _is_bright, _random_bright_color,
_selected_clusters_colors,
)
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
for _ in range(10):
assert _is_bright(_random_bright_color())
def test_selected_clusters_colors():
assert _selected_clusters_colors().ndim == 2
assert len(_selected_clusters_colors(3)) == 3
assert len(_selected_clusters_colors(10)) == 10
|
Increase coverage in color module
|
Increase coverage in color module
|
Python
|
bsd-3-clause
|
rossant/phy,rossant/phy,rossant/phy,kwikteam/phy,kwikteam/phy,kwikteam/phy
|
# -*- coding: utf-8 -*-
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import _random_color, _is_bright, _random_bright_color
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
assert _is_bright(_random_bright_color())
Increase coverage in color module
|
# -*- coding: utf-8 -*-
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import (_random_color, _is_bright, _random_bright_color,
_selected_clusters_colors,
)
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
for _ in range(10):
assert _is_bright(_random_bright_color())
def test_selected_clusters_colors():
assert _selected_clusters_colors().ndim == 2
assert len(_selected_clusters_colors(3)) == 3
assert len(_selected_clusters_colors(10)) == 10
|
<commit_before># -*- coding: utf-8 -*-
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import _random_color, _is_bright, _random_bright_color
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
assert _is_bright(_random_bright_color())
<commit_msg>Increase coverage in color module<commit_after>
|
# -*- coding: utf-8 -*-
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import (_random_color, _is_bright, _random_bright_color,
_selected_clusters_colors,
)
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
for _ in range(10):
assert _is_bright(_random_bright_color())
def test_selected_clusters_colors():
assert _selected_clusters_colors().ndim == 2
assert len(_selected_clusters_colors(3)) == 3
assert len(_selected_clusters_colors(10)) == 10
|
# -*- coding: utf-8 -*-
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import _random_color, _is_bright, _random_bright_color
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
assert _is_bright(_random_bright_color())
Increase coverage in color module# -*- coding: utf-8 -*-
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import (_random_color, _is_bright, _random_bright_color,
_selected_clusters_colors,
)
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
for _ in range(10):
assert _is_bright(_random_bright_color())
def test_selected_clusters_colors():
assert _selected_clusters_colors().ndim == 2
assert len(_selected_clusters_colors(3)) == 3
assert len(_selected_clusters_colors(10)) == 10
|
<commit_before># -*- coding: utf-8 -*-
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import _random_color, _is_bright, _random_bright_color
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
assert _is_bright(_random_bright_color())
<commit_msg>Increase coverage in color module<commit_after># -*- coding: utf-8 -*-
"""Test colors."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
from pytest import mark
from .._color import (_random_color, _is_bright, _random_bright_color,
_selected_clusters_colors,
)
from ..testing import show_colored_canvas
# Skip these tests in "make test-quick".
pytestmark = mark.long
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_random_color():
color = _random_color()
show_colored_canvas(color)
for _ in range(10):
assert _is_bright(_random_bright_color())
def test_selected_clusters_colors():
assert _selected_clusters_colors().ndim == 2
assert len(_selected_clusters_colors(3)) == 3
assert len(_selected_clusters_colors(10)) == 10
|
90cea726d5105844961ea100679ace0f621fd1a7
|
grid/scoreboard_parser.py
|
grid/scoreboard_parser.py
|
from HTMLParser import HTMLParser
class ScoreboardParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.scores = []
self.cur_game = None
self.get_data = False
self.get_name = False
def handle_starttag(self, tag, attrs):
if tag == 'table':
self.cur_game = []
elif tag == 'td':
if ('class', 'name') in attrs:
self.get_name = True
elif ('class', 'finalscore') in attrs:
self.get_data = True
elif ('class', 'label') in attrs and ('align', 'left') in attrs:
self.get_data = True
elif tag == 'a' and self.get_name:
for name, value in attrs:
if name == 'href':
self.cur_game.append(value.split("/")[4])
self.get_name = False
def handle_endtag(self, tag):
if tag == 'table':
self.scores.append(self.cur_game)
def handle_data(self, data):
if not self.get_data:
return
self.cur_game.append(data.strip())
self.get_data = False
|
from HTMLParser import HTMLParser
class ScoreboardParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.scores = []
self.cur_game = None
self.get_data = False
self.get_name = False
def handle_starttag(self, tag, attrs):
if tag == 'table':
self.cur_game = []
elif tag == 'td':
if ('class', 'name') in attrs:
self.get_name = True
elif ('class', 'finalScore') in attrs:
self.get_data = True
elif ('class', 'label') in attrs and ('align', 'left') in attrs:
self.get_data = True
elif tag == 'a' and self.get_name:
for name, value in attrs:
if name == 'href':
self.cur_game.append(value.split("/")[4])
self.get_name = False
def handle_endtag(self, tag):
if tag == 'table':
self.scores.append(self.cur_game)
def handle_data(self, data):
if not self.get_data:
return
self.cur_game.append(data.strip())
self.get_data = False
|
Fix classname in scoreboard parser
|
Fix classname in scoreboard parser
|
Python
|
mit
|
bschmeck/gnarl,bschmeck/gnarl,bschmeck/gnarl
|
from HTMLParser import HTMLParser
class ScoreboardParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.scores = []
self.cur_game = None
self.get_data = False
self.get_name = False
def handle_starttag(self, tag, attrs):
if tag == 'table':
self.cur_game = []
elif tag == 'td':
if ('class', 'name') in attrs:
self.get_name = True
elif ('class', 'finalscore') in attrs:
self.get_data = True
elif ('class', 'label') in attrs and ('align', 'left') in attrs:
self.get_data = True
elif tag == 'a' and self.get_name:
for name, value in attrs:
if name == 'href':
self.cur_game.append(value.split("/")[4])
self.get_name = False
def handle_endtag(self, tag):
if tag == 'table':
self.scores.append(self.cur_game)
def handle_data(self, data):
if not self.get_data:
return
self.cur_game.append(data.strip())
self.get_data = False
Fix classname in scoreboard parser
|
from HTMLParser import HTMLParser
class ScoreboardParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.scores = []
self.cur_game = None
self.get_data = False
self.get_name = False
def handle_starttag(self, tag, attrs):
if tag == 'table':
self.cur_game = []
elif tag == 'td':
if ('class', 'name') in attrs:
self.get_name = True
elif ('class', 'finalScore') in attrs:
self.get_data = True
elif ('class', 'label') in attrs and ('align', 'left') in attrs:
self.get_data = True
elif tag == 'a' and self.get_name:
for name, value in attrs:
if name == 'href':
self.cur_game.append(value.split("/")[4])
self.get_name = False
def handle_endtag(self, tag):
if tag == 'table':
self.scores.append(self.cur_game)
def handle_data(self, data):
if not self.get_data:
return
self.cur_game.append(data.strip())
self.get_data = False
|
<commit_before>from HTMLParser import HTMLParser
class ScoreboardParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.scores = []
self.cur_game = None
self.get_data = False
self.get_name = False
def handle_starttag(self, tag, attrs):
if tag == 'table':
self.cur_game = []
elif tag == 'td':
if ('class', 'name') in attrs:
self.get_name = True
elif ('class', 'finalscore') in attrs:
self.get_data = True
elif ('class', 'label') in attrs and ('align', 'left') in attrs:
self.get_data = True
elif tag == 'a' and self.get_name:
for name, value in attrs:
if name == 'href':
self.cur_game.append(value.split("/")[4])
self.get_name = False
def handle_endtag(self, tag):
if tag == 'table':
self.scores.append(self.cur_game)
def handle_data(self, data):
if not self.get_data:
return
self.cur_game.append(data.strip())
self.get_data = False
<commit_msg>Fix classname in scoreboard parser<commit_after>
|
from HTMLParser import HTMLParser
class ScoreboardParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.scores = []
self.cur_game = None
self.get_data = False
self.get_name = False
def handle_starttag(self, tag, attrs):
if tag == 'table':
self.cur_game = []
elif tag == 'td':
if ('class', 'name') in attrs:
self.get_name = True
elif ('class', 'finalScore') in attrs:
self.get_data = True
elif ('class', 'label') in attrs and ('align', 'left') in attrs:
self.get_data = True
elif tag == 'a' and self.get_name:
for name, value in attrs:
if name == 'href':
self.cur_game.append(value.split("/")[4])
self.get_name = False
def handle_endtag(self, tag):
if tag == 'table':
self.scores.append(self.cur_game)
def handle_data(self, data):
if not self.get_data:
return
self.cur_game.append(data.strip())
self.get_data = False
|
from HTMLParser import HTMLParser
class ScoreboardParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.scores = []
self.cur_game = None
self.get_data = False
self.get_name = False
def handle_starttag(self, tag, attrs):
if tag == 'table':
self.cur_game = []
elif tag == 'td':
if ('class', 'name') in attrs:
self.get_name = True
elif ('class', 'finalscore') in attrs:
self.get_data = True
elif ('class', 'label') in attrs and ('align', 'left') in attrs:
self.get_data = True
elif tag == 'a' and self.get_name:
for name, value in attrs:
if name == 'href':
self.cur_game.append(value.split("/")[4])
self.get_name = False
def handle_endtag(self, tag):
if tag == 'table':
self.scores.append(self.cur_game)
def handle_data(self, data):
if not self.get_data:
return
self.cur_game.append(data.strip())
self.get_data = False
Fix classname in scoreboard parserfrom HTMLParser import HTMLParser
class ScoreboardParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.scores = []
self.cur_game = None
self.get_data = False
self.get_name = False
def handle_starttag(self, tag, attrs):
if tag == 'table':
self.cur_game = []
elif tag == 'td':
if ('class', 'name') in attrs:
self.get_name = True
elif ('class', 'finalScore') in attrs:
self.get_data = True
elif ('class', 'label') in attrs and ('align', 'left') in attrs:
self.get_data = True
elif tag == 'a' and self.get_name:
for name, value in attrs:
if name == 'href':
self.cur_game.append(value.split("/")[4])
self.get_name = False
def handle_endtag(self, tag):
if tag == 'table':
self.scores.append(self.cur_game)
def handle_data(self, data):
if not self.get_data:
return
self.cur_game.append(data.strip())
self.get_data = False
|
<commit_before>from HTMLParser import HTMLParser
class ScoreboardParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.scores = []
self.cur_game = None
self.get_data = False
self.get_name = False
def handle_starttag(self, tag, attrs):
if tag == 'table':
self.cur_game = []
elif tag == 'td':
if ('class', 'name') in attrs:
self.get_name = True
elif ('class', 'finalscore') in attrs:
self.get_data = True
elif ('class', 'label') in attrs and ('align', 'left') in attrs:
self.get_data = True
elif tag == 'a' and self.get_name:
for name, value in attrs:
if name == 'href':
self.cur_game.append(value.split("/")[4])
self.get_name = False
def handle_endtag(self, tag):
if tag == 'table':
self.scores.append(self.cur_game)
def handle_data(self, data):
if not self.get_data:
return
self.cur_game.append(data.strip())
self.get_data = False
<commit_msg>Fix classname in scoreboard parser<commit_after>from HTMLParser import HTMLParser
class ScoreboardParser(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.scores = []
self.cur_game = None
self.get_data = False
self.get_name = False
def handle_starttag(self, tag, attrs):
if tag == 'table':
self.cur_game = []
elif tag == 'td':
if ('class', 'name') in attrs:
self.get_name = True
elif ('class', 'finalScore') in attrs:
self.get_data = True
elif ('class', 'label') in attrs and ('align', 'left') in attrs:
self.get_data = True
elif tag == 'a' and self.get_name:
for name, value in attrs:
if name == 'href':
self.cur_game.append(value.split("/")[4])
self.get_name = False
def handle_endtag(self, tag):
if tag == 'table':
self.scores.append(self.cur_game)
def handle_data(self, data):
if not self.get_data:
return
self.cur_game.append(data.strip())
self.get_data = False
|
afefe298f5e7bd628a577add02a1f49b42deffcc
|
kurt.py
|
kurt.py
|
#!/usr/bin/env python
import sys, os
MAX_PACKSIZE = 1024*1024*1024
def usage():
sys.stderr.write("usage: kurt.py <path>\n")
def get_files(path):
files = os.listdir(path)
for file in sorted(files):
file_path = os.path.join(path, file)
file_stats = os.stat(file_path)
yield (file_path, file_stats.st_size)
def get_packs(path):
singles = []
packsize = 0
paths = []
for path, size in get_files(path):
if packsize + size <= MAX_PACKSIZE:
paths.append(path)
packsize += size
elif size > MAX_PACKSIZE:
singles.append(([path], size))
else:
yield (paths, packsize)
packsize = 0
paths = []
for single in singles:
yield single
singles = []
if len(paths) > 0:
yield (paths, packsize)
if len(sys.argv) != 2:
usage()
sys.exit(1)
path = sys.argv[1]
#TODO check if path exists
for index, pack in enumerate(get_packs(path)):
paths, packsize = pack
print "Pack %d (%dB)" % (index, packsize)
for path in paths:
print " %s" % path
|
#!/usr/bin/env python
import sys, os
MAX_PACKSIZE = 1024*1024*1024
def usage():
sys.stderr.write("usage: kurt.py <path>\n")
def get_files(path):
files = os.listdir(path)
for file in sorted(files):
file_path = os.path.join(path, file)
file_stats = os.stat(file_path)
yield (file_path, file_stats.st_size)
def get_packs(path):
singles = []
packsize = 0
paths = []
for path, size in get_files(path):
if packsize + size <= MAX_PACKSIZE:
paths.append(path)
packsize += size
elif size > MAX_PACKSIZE:
singles.append(([path], size))
else:
yield (paths, packsize)
packsize = 0
paths = []
for single in singles:
yield single
singles = []
if len(paths) > 0:
yield (paths, packsize)
def main():
if len(sys.argv) != 2:
usage()
sys.exit(1)
path = sys.argv[1]
#TODO check if path exists
for index, pack in enumerate(get_packs(path)):
paths, packsize = pack
print "Pack %d (%dB)" % (index, packsize)
for path in paths:
print " %s" % path
if __name__ == "__main__":
main()
|
Move execution code to main()
|
Move execution code to main()
Signed-off-by: Zoran Zaric <d7dacae2c968388960bf8970080a980ed5c5dcb7@zoranzaric.de>
|
Python
|
lgpl-2.1
|
zoranzaric/kurt
|
#!/usr/bin/env python
import sys, os
MAX_PACKSIZE = 1024*1024*1024
def usage():
sys.stderr.write("usage: kurt.py <path>\n")
def get_files(path):
files = os.listdir(path)
for file in sorted(files):
file_path = os.path.join(path, file)
file_stats = os.stat(file_path)
yield (file_path, file_stats.st_size)
def get_packs(path):
singles = []
packsize = 0
paths = []
for path, size in get_files(path):
if packsize + size <= MAX_PACKSIZE:
paths.append(path)
packsize += size
elif size > MAX_PACKSIZE:
singles.append(([path], size))
else:
yield (paths, packsize)
packsize = 0
paths = []
for single in singles:
yield single
singles = []
if len(paths) > 0:
yield (paths, packsize)
if len(sys.argv) != 2:
usage()
sys.exit(1)
path = sys.argv[1]
#TODO check if path exists
for index, pack in enumerate(get_packs(path)):
paths, packsize = pack
print "Pack %d (%dB)" % (index, packsize)
for path in paths:
print " %s" % path
Move execution code to main()
Signed-off-by: Zoran Zaric <d7dacae2c968388960bf8970080a980ed5c5dcb7@zoranzaric.de>
|
#!/usr/bin/env python
import sys, os
MAX_PACKSIZE = 1024*1024*1024
def usage():
sys.stderr.write("usage: kurt.py <path>\n")
def get_files(path):
files = os.listdir(path)
for file in sorted(files):
file_path = os.path.join(path, file)
file_stats = os.stat(file_path)
yield (file_path, file_stats.st_size)
def get_packs(path):
singles = []
packsize = 0
paths = []
for path, size in get_files(path):
if packsize + size <= MAX_PACKSIZE:
paths.append(path)
packsize += size
elif size > MAX_PACKSIZE:
singles.append(([path], size))
else:
yield (paths, packsize)
packsize = 0
paths = []
for single in singles:
yield single
singles = []
if len(paths) > 0:
yield (paths, packsize)
def main():
if len(sys.argv) != 2:
usage()
sys.exit(1)
path = sys.argv[1]
#TODO check if path exists
for index, pack in enumerate(get_packs(path)):
paths, packsize = pack
print "Pack %d (%dB)" % (index, packsize)
for path in paths:
print " %s" % path
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
import sys, os
MAX_PACKSIZE = 1024*1024*1024
def usage():
sys.stderr.write("usage: kurt.py <path>\n")
def get_files(path):
files = os.listdir(path)
for file in sorted(files):
file_path = os.path.join(path, file)
file_stats = os.stat(file_path)
yield (file_path, file_stats.st_size)
def get_packs(path):
singles = []
packsize = 0
paths = []
for path, size in get_files(path):
if packsize + size <= MAX_PACKSIZE:
paths.append(path)
packsize += size
elif size > MAX_PACKSIZE:
singles.append(([path], size))
else:
yield (paths, packsize)
packsize = 0
paths = []
for single in singles:
yield single
singles = []
if len(paths) > 0:
yield (paths, packsize)
if len(sys.argv) != 2:
usage()
sys.exit(1)
path = sys.argv[1]
#TODO check if path exists
for index, pack in enumerate(get_packs(path)):
paths, packsize = pack
print "Pack %d (%dB)" % (index, packsize)
for path in paths:
print " %s" % path
<commit_msg>Move execution code to main()
Signed-off-by: Zoran Zaric <d7dacae2c968388960bf8970080a980ed5c5dcb7@zoranzaric.de><commit_after>
|
#!/usr/bin/env python
import sys, os
MAX_PACKSIZE = 1024*1024*1024
def usage():
sys.stderr.write("usage: kurt.py <path>\n")
def get_files(path):
files = os.listdir(path)
for file in sorted(files):
file_path = os.path.join(path, file)
file_stats = os.stat(file_path)
yield (file_path, file_stats.st_size)
def get_packs(path):
singles = []
packsize = 0
paths = []
for path, size in get_files(path):
if packsize + size <= MAX_PACKSIZE:
paths.append(path)
packsize += size
elif size > MAX_PACKSIZE:
singles.append(([path], size))
else:
yield (paths, packsize)
packsize = 0
paths = []
for single in singles:
yield single
singles = []
if len(paths) > 0:
yield (paths, packsize)
def main():
if len(sys.argv) != 2:
usage()
sys.exit(1)
path = sys.argv[1]
#TODO check if path exists
for index, pack in enumerate(get_packs(path)):
paths, packsize = pack
print "Pack %d (%dB)" % (index, packsize)
for path in paths:
print " %s" % path
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
import sys, os
MAX_PACKSIZE = 1024*1024*1024
def usage():
sys.stderr.write("usage: kurt.py <path>\n")
def get_files(path):
files = os.listdir(path)
for file in sorted(files):
file_path = os.path.join(path, file)
file_stats = os.stat(file_path)
yield (file_path, file_stats.st_size)
def get_packs(path):
singles = []
packsize = 0
paths = []
for path, size in get_files(path):
if packsize + size <= MAX_PACKSIZE:
paths.append(path)
packsize += size
elif size > MAX_PACKSIZE:
singles.append(([path], size))
else:
yield (paths, packsize)
packsize = 0
paths = []
for single in singles:
yield single
singles = []
if len(paths) > 0:
yield (paths, packsize)
if len(sys.argv) != 2:
usage()
sys.exit(1)
path = sys.argv[1]
#TODO check if path exists
for index, pack in enumerate(get_packs(path)):
paths, packsize = pack
print "Pack %d (%dB)" % (index, packsize)
for path in paths:
print " %s" % path
Move execution code to main()
Signed-off-by: Zoran Zaric <d7dacae2c968388960bf8970080a980ed5c5dcb7@zoranzaric.de>#!/usr/bin/env python
import sys, os
MAX_PACKSIZE = 1024*1024*1024
def usage():
sys.stderr.write("usage: kurt.py <path>\n")
def get_files(path):
files = os.listdir(path)
for file in sorted(files):
file_path = os.path.join(path, file)
file_stats = os.stat(file_path)
yield (file_path, file_stats.st_size)
def get_packs(path):
singles = []
packsize = 0
paths = []
for path, size in get_files(path):
if packsize + size <= MAX_PACKSIZE:
paths.append(path)
packsize += size
elif size > MAX_PACKSIZE:
singles.append(([path], size))
else:
yield (paths, packsize)
packsize = 0
paths = []
for single in singles:
yield single
singles = []
if len(paths) > 0:
yield (paths, packsize)
def main():
if len(sys.argv) != 2:
usage()
sys.exit(1)
path = sys.argv[1]
#TODO check if path exists
for index, pack in enumerate(get_packs(path)):
paths, packsize = pack
print "Pack %d (%dB)" % (index, packsize)
for path in paths:
print " %s" % path
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
import sys, os
MAX_PACKSIZE = 1024*1024*1024
def usage():
sys.stderr.write("usage: kurt.py <path>\n")
def get_files(path):
files = os.listdir(path)
for file in sorted(files):
file_path = os.path.join(path, file)
file_stats = os.stat(file_path)
yield (file_path, file_stats.st_size)
def get_packs(path):
singles = []
packsize = 0
paths = []
for path, size in get_files(path):
if packsize + size <= MAX_PACKSIZE:
paths.append(path)
packsize += size
elif size > MAX_PACKSIZE:
singles.append(([path], size))
else:
yield (paths, packsize)
packsize = 0
paths = []
for single in singles:
yield single
singles = []
if len(paths) > 0:
yield (paths, packsize)
if len(sys.argv) != 2:
usage()
sys.exit(1)
path = sys.argv[1]
#TODO check if path exists
for index, pack in enumerate(get_packs(path)):
paths, packsize = pack
print "Pack %d (%dB)" % (index, packsize)
for path in paths:
print " %s" % path
<commit_msg>Move execution code to main()
Signed-off-by: Zoran Zaric <d7dacae2c968388960bf8970080a980ed5c5dcb7@zoranzaric.de><commit_after>#!/usr/bin/env python
import sys, os
MAX_PACKSIZE = 1024*1024*1024
def usage():
sys.stderr.write("usage: kurt.py <path>\n")
def get_files(path):
files = os.listdir(path)
for file in sorted(files):
file_path = os.path.join(path, file)
file_stats = os.stat(file_path)
yield (file_path, file_stats.st_size)
def get_packs(path):
singles = []
packsize = 0
paths = []
for path, size in get_files(path):
if packsize + size <= MAX_PACKSIZE:
paths.append(path)
packsize += size
elif size > MAX_PACKSIZE:
singles.append(([path], size))
else:
yield (paths, packsize)
packsize = 0
paths = []
for single in singles:
yield single
singles = []
if len(paths) > 0:
yield (paths, packsize)
def main():
if len(sys.argv) != 2:
usage()
sys.exit(1)
path = sys.argv[1]
#TODO check if path exists
for index, pack in enumerate(get_packs(path)):
paths, packsize = pack
print "Pack %d (%dB)" % (index, packsize)
for path in paths:
print " %s" % path
if __name__ == "__main__":
main()
|
cbaa1c3a74c9046b1571ba67ef85ee70d51812f5
|
chef/tests/__init__.py
|
chef/tests/__init__.py
|
import os
import random
from unittest2 import TestCase, skipUnless
from chef.api import ChefAPI
from chef.exceptions import ChefError
TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
def skipSlowTest():
return skipUnless(os.environ.get('PYCHEF_SLOW_TESTS'), 'slow tests skipped, set $PYCHEF_SLOW_TESTS=1 to enable')
def test_chef_api():
return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')
class ChefTestCase(TestCase):
"""Base class for Chef unittests."""
def setUp(self):
super(ChefTestCase, self).setUp()
self.api = test_chef_api()
self.api.set_default()
self.objects = []
def tearDown(self):
for obj in self.objects:
try:
obj.delete()
except ChefError, e:
print e
# Continue running
def register(self, obj):
self.objects.append(obj)
def random(self, length=8, alphabet='0123456789abcdef'):
return ''.join(random.choice(alphabet) for _ in xrange(length))
|
import os
import random
from unittest2 import TestCase, skipUnless
from chef.api import ChefAPI
from chef.exceptions import ChefError
TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
def skipSlowTest():
return skipUnless(os.environ.get('PYCHEF_SLOW_TESTS'), 'slow tests skipped, set $PYCHEF_SLOW_TESTS=1 to enable')
def test_chef_api(**kwargs):
return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests', **kwargs)
class ChefTestCase(TestCase):
"""Base class for Chef unittests."""
def setUp(self):
super(ChefTestCase, self).setUp()
self.api = test_chef_api()
self.api.set_default()
self.objects = []
def tearDown(self):
for obj in self.objects:
try:
obj.delete()
except ChefError, e:
print e
# Continue running
def register(self, obj):
self.objects.append(obj)
def random(self, length=8, alphabet='0123456789abcdef'):
return ''.join(random.choice(alphabet) for _ in xrange(length))
|
Allow passing extra options to the test API object.
|
Allow passing extra options to the test API object.
The only one of interest is version, but yay generic code.
|
Python
|
apache-2.0
|
cread/pychef,coderanger/pychef,coderanger/pychef,Scalr/pychef,dipakvwarade/pychef,cread/pychef,jarosser06/pychef,Scalr/pychef,dipakvwarade/pychef,jarosser06/pychef
|
import os
import random
from unittest2 import TestCase, skipUnless
from chef.api import ChefAPI
from chef.exceptions import ChefError
TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
def skipSlowTest():
return skipUnless(os.environ.get('PYCHEF_SLOW_TESTS'), 'slow tests skipped, set $PYCHEF_SLOW_TESTS=1 to enable')
def test_chef_api():
return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')
class ChefTestCase(TestCase):
"""Base class for Chef unittests."""
def setUp(self):
super(ChefTestCase, self).setUp()
self.api = test_chef_api()
self.api.set_default()
self.objects = []
def tearDown(self):
for obj in self.objects:
try:
obj.delete()
except ChefError, e:
print e
# Continue running
def register(self, obj):
self.objects.append(obj)
def random(self, length=8, alphabet='0123456789abcdef'):
return ''.join(random.choice(alphabet) for _ in xrange(length))
Allow passing extra options to the test API object.
The only one of interest is version, but yay generic code.
|
import os
import random
from unittest2 import TestCase, skipUnless
from chef.api import ChefAPI
from chef.exceptions import ChefError
TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
def skipSlowTest():
return skipUnless(os.environ.get('PYCHEF_SLOW_TESTS'), 'slow tests skipped, set $PYCHEF_SLOW_TESTS=1 to enable')
def test_chef_api(**kwargs):
return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests', **kwargs)
class ChefTestCase(TestCase):
"""Base class for Chef unittests."""
def setUp(self):
super(ChefTestCase, self).setUp()
self.api = test_chef_api()
self.api.set_default()
self.objects = []
def tearDown(self):
for obj in self.objects:
try:
obj.delete()
except ChefError, e:
print e
# Continue running
def register(self, obj):
self.objects.append(obj)
def random(self, length=8, alphabet='0123456789abcdef'):
return ''.join(random.choice(alphabet) for _ in xrange(length))
|
<commit_before>import os
import random
from unittest2 import TestCase, skipUnless
from chef.api import ChefAPI
from chef.exceptions import ChefError
TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
def skipSlowTest():
return skipUnless(os.environ.get('PYCHEF_SLOW_TESTS'), 'slow tests skipped, set $PYCHEF_SLOW_TESTS=1 to enable')
def test_chef_api():
return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')
class ChefTestCase(TestCase):
"""Base class for Chef unittests."""
def setUp(self):
super(ChefTestCase, self).setUp()
self.api = test_chef_api()
self.api.set_default()
self.objects = []
def tearDown(self):
for obj in self.objects:
try:
obj.delete()
except ChefError, e:
print e
# Continue running
def register(self, obj):
self.objects.append(obj)
def random(self, length=8, alphabet='0123456789abcdef'):
return ''.join(random.choice(alphabet) for _ in xrange(length))
<commit_msg>Allow passing extra options to the test API object.
The only one of interest is version, but yay generic code.<commit_after>
|
import os
import random
from unittest2 import TestCase, skipUnless
from chef.api import ChefAPI
from chef.exceptions import ChefError
TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
def skipSlowTest():
return skipUnless(os.environ.get('PYCHEF_SLOW_TESTS'), 'slow tests skipped, set $PYCHEF_SLOW_TESTS=1 to enable')
def test_chef_api(**kwargs):
return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests', **kwargs)
class ChefTestCase(TestCase):
"""Base class for Chef unittests."""
def setUp(self):
super(ChefTestCase, self).setUp()
self.api = test_chef_api()
self.api.set_default()
self.objects = []
def tearDown(self):
for obj in self.objects:
try:
obj.delete()
except ChefError, e:
print e
# Continue running
def register(self, obj):
self.objects.append(obj)
def random(self, length=8, alphabet='0123456789abcdef'):
return ''.join(random.choice(alphabet) for _ in xrange(length))
|
import os
import random
from unittest2 import TestCase, skipUnless
from chef.api import ChefAPI
from chef.exceptions import ChefError
TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
def skipSlowTest():
return skipUnless(os.environ.get('PYCHEF_SLOW_TESTS'), 'slow tests skipped, set $PYCHEF_SLOW_TESTS=1 to enable')
def test_chef_api():
return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')
class ChefTestCase(TestCase):
"""Base class for Chef unittests."""
def setUp(self):
super(ChefTestCase, self).setUp()
self.api = test_chef_api()
self.api.set_default()
self.objects = []
def tearDown(self):
for obj in self.objects:
try:
obj.delete()
except ChefError, e:
print e
# Continue running
def register(self, obj):
self.objects.append(obj)
def random(self, length=8, alphabet='0123456789abcdef'):
return ''.join(random.choice(alphabet) for _ in xrange(length))
Allow passing extra options to the test API object.
The only one of interest is version, but yay generic code.import os
import random
from unittest2 import TestCase, skipUnless
from chef.api import ChefAPI
from chef.exceptions import ChefError
TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
def skipSlowTest():
return skipUnless(os.environ.get('PYCHEF_SLOW_TESTS'), 'slow tests skipped, set $PYCHEF_SLOW_TESTS=1 to enable')
def test_chef_api(**kwargs):
return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests', **kwargs)
class ChefTestCase(TestCase):
"""Base class for Chef unittests."""
def setUp(self):
super(ChefTestCase, self).setUp()
self.api = test_chef_api()
self.api.set_default()
self.objects = []
def tearDown(self):
for obj in self.objects:
try:
obj.delete()
except ChefError, e:
print e
# Continue running
def register(self, obj):
self.objects.append(obj)
def random(self, length=8, alphabet='0123456789abcdef'):
return ''.join(random.choice(alphabet) for _ in xrange(length))
|
<commit_before>import os
import random
from unittest2 import TestCase, skipUnless
from chef.api import ChefAPI
from chef.exceptions import ChefError
TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
def skipSlowTest():
return skipUnless(os.environ.get('PYCHEF_SLOW_TESTS'), 'slow tests skipped, set $PYCHEF_SLOW_TESTS=1 to enable')
def test_chef_api():
return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests')
class ChefTestCase(TestCase):
"""Base class for Chef unittests."""
def setUp(self):
super(ChefTestCase, self).setUp()
self.api = test_chef_api()
self.api.set_default()
self.objects = []
def tearDown(self):
for obj in self.objects:
try:
obj.delete()
except ChefError, e:
print e
# Continue running
def register(self, obj):
self.objects.append(obj)
def random(self, length=8, alphabet='0123456789abcdef'):
return ''.join(random.choice(alphabet) for _ in xrange(length))
<commit_msg>Allow passing extra options to the test API object.
The only one of interest is version, but yay generic code.<commit_after>import os
import random
from unittest2 import TestCase, skipUnless
from chef.api import ChefAPI
from chef.exceptions import ChefError
TEST_ROOT = os.path.dirname(os.path.abspath(__file__))
def skipSlowTest():
return skipUnless(os.environ.get('PYCHEF_SLOW_TESTS'), 'slow tests skipped, set $PYCHEF_SLOW_TESTS=1 to enable')
def test_chef_api(**kwargs):
return ChefAPI('https://api.opscode.com/organizations/pycheftest', os.path.join(TEST_ROOT, 'client.pem'), 'unittests', **kwargs)
class ChefTestCase(TestCase):
"""Base class for Chef unittests."""
def setUp(self):
super(ChefTestCase, self).setUp()
self.api = test_chef_api()
self.api.set_default()
self.objects = []
def tearDown(self):
for obj in self.objects:
try:
obj.delete()
except ChefError, e:
print e
# Continue running
def register(self, obj):
self.objects.append(obj)
def random(self, length=8, alphabet='0123456789abcdef'):
return ''.join(random.choice(alphabet) for _ in xrange(length))
|
9de1ce1def7915bf4587dbb0a4d9f396c77bc3b7
|
django_lightweight_queue/backends/synchronous.py
|
django_lightweight_queue/backends/synchronous.py
|
import time
class SynchronousBackend(object):
def enqueue(self, job, queue):
job.run()
def dequeue(self, queue, timeout):
# Cannot dequeue from the synchronous backend but we can emulate by
# never returning anything
time.sleep(timeout)
|
import time
class SynchronousBackend(object):
def enqueue(self, job, queue):
job.run()
def dequeue(self, queue, timeout):
# Cannot dequeue from the synchronous backend but we can emulate by
# never returning anything
time.sleep(timeout)
def length(self, queue):
# The length is the number of items waiting to be processed, which can
# be defined as always 0 for the synchronous backend
return 0
|
Add length for the SynchronousBackend
|
Add length for the SynchronousBackend
|
Python
|
bsd-3-clause
|
thread/django-lightweight-queue,thread/django-lightweight-queue
|
import time
class SynchronousBackend(object):
def enqueue(self, job, queue):
job.run()
def dequeue(self, queue, timeout):
# Cannot dequeue from the synchronous backend but we can emulate by
# never returning anything
time.sleep(timeout)
Add length for the SynchronousBackend
|
import time
class SynchronousBackend(object):
def enqueue(self, job, queue):
job.run()
def dequeue(self, queue, timeout):
# Cannot dequeue from the synchronous backend but we can emulate by
# never returning anything
time.sleep(timeout)
def length(self, queue):
# The length is the number of items waiting to be processed, which can
# be defined as always 0 for the synchronous backend
return 0
|
<commit_before>import time
class SynchronousBackend(object):
def enqueue(self, job, queue):
job.run()
def dequeue(self, queue, timeout):
# Cannot dequeue from the synchronous backend but we can emulate by
# never returning anything
time.sleep(timeout)
<commit_msg>Add length for the SynchronousBackend<commit_after>
|
import time
class SynchronousBackend(object):
def enqueue(self, job, queue):
job.run()
def dequeue(self, queue, timeout):
# Cannot dequeue from the synchronous backend but we can emulate by
# never returning anything
time.sleep(timeout)
def length(self, queue):
# The length is the number of items waiting to be processed, which can
# be defined as always 0 for the synchronous backend
return 0
|
import time
class SynchronousBackend(object):
def enqueue(self, job, queue):
job.run()
def dequeue(self, queue, timeout):
# Cannot dequeue from the synchronous backend but we can emulate by
# never returning anything
time.sleep(timeout)
Add length for the SynchronousBackendimport time
class SynchronousBackend(object):
def enqueue(self, job, queue):
job.run()
def dequeue(self, queue, timeout):
# Cannot dequeue from the synchronous backend but we can emulate by
# never returning anything
time.sleep(timeout)
def length(self, queue):
# The length is the number of items waiting to be processed, which can
# be defined as always 0 for the synchronous backend
return 0
|
<commit_before>import time
class SynchronousBackend(object):
def enqueue(self, job, queue):
job.run()
def dequeue(self, queue, timeout):
# Cannot dequeue from the synchronous backend but we can emulate by
# never returning anything
time.sleep(timeout)
<commit_msg>Add length for the SynchronousBackend<commit_after>import time
class SynchronousBackend(object):
def enqueue(self, job, queue):
job.run()
def dequeue(self, queue, timeout):
# Cannot dequeue from the synchronous backend but we can emulate by
# never returning anything
time.sleep(timeout)
def length(self, queue):
# The length is the number of items waiting to be processed, which can
# be defined as always 0 for the synchronous backend
return 0
|
e0542c6d292bb677a1147751bc59475a81a72a75
|
flask_elasticsearch.py
|
flask_elasticsearch.py
|
from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticSearch(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
# Use the newstyle teardown_appcontext if it's available,
# otherwise fall back to the request context
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
def __getattr__(self, item):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = Elasticsearch(hosts=[ctx.app.config.get('ELASTICSEARCH_HOST')],
http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'))
return getattr(ctx.elasticsearch, item)
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = None
|
from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticsearch(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
# Use the newstyle teardown_appcontext if it's available,
# otherwise fall back to the request context
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
def __getattr__(self, item):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = Elasticsearch(hosts=[ctx.app.config.get('ELASTICSEARCH_HOST')],
http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'))
return getattr(ctx.elasticsearch, item)
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = None
|
Rename Elasticsearch to match the actual name
|
Rename Elasticsearch to match the actual name
|
Python
|
mit
|
chiangf/Flask-Elasticsearch
|
from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticSearch(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
# Use the newstyle teardown_appcontext if it's available,
# otherwise fall back to the request context
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
def __getattr__(self, item):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = Elasticsearch(hosts=[ctx.app.config.get('ELASTICSEARCH_HOST')],
http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'))
return getattr(ctx.elasticsearch, item)
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = None
Rename Elasticsearch to match the actual name
|
from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticsearch(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
# Use the newstyle teardown_appcontext if it's available,
# otherwise fall back to the request context
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
def __getattr__(self, item):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = Elasticsearch(hosts=[ctx.app.config.get('ELASTICSEARCH_HOST')],
http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'))
return getattr(ctx.elasticsearch, item)
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = None
|
<commit_before>from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticSearch(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
# Use the newstyle teardown_appcontext if it's available,
# otherwise fall back to the request context
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
def __getattr__(self, item):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = Elasticsearch(hosts=[ctx.app.config.get('ELASTICSEARCH_HOST')],
http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'))
return getattr(ctx.elasticsearch, item)
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = None
<commit_msg>Rename Elasticsearch to match the actual name<commit_after>
|
from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticsearch(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
# Use the newstyle teardown_appcontext if it's available,
# otherwise fall back to the request context
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
def __getattr__(self, item):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = Elasticsearch(hosts=[ctx.app.config.get('ELASTICSEARCH_HOST')],
http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'))
return getattr(ctx.elasticsearch, item)
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = None
|
from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticSearch(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
# Use the newstyle teardown_appcontext if it's available,
# otherwise fall back to the request context
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
def __getattr__(self, item):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = Elasticsearch(hosts=[ctx.app.config.get('ELASTICSEARCH_HOST')],
http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'))
return getattr(ctx.elasticsearch, item)
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = None
Rename Elasticsearch to match the actual namefrom elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticsearch(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
# Use the newstyle teardown_appcontext if it's available,
# otherwise fall back to the request context
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
def __getattr__(self, item):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = Elasticsearch(hosts=[ctx.app.config.get('ELASTICSEARCH_HOST')],
http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'))
return getattr(ctx.elasticsearch, item)
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = None
|
<commit_before>from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticSearch(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
# Use the newstyle teardown_appcontext if it's available,
# otherwise fall back to the request context
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
def __getattr__(self, item):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = Elasticsearch(hosts=[ctx.app.config.get('ELASTICSEARCH_HOST')],
http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'))
return getattr(ctx.elasticsearch, item)
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = None
<commit_msg>Rename Elasticsearch to match the actual name<commit_after>from elasticsearch import Elasticsearch
# Find the stack on which we want to store the database connection.
# Starting with Flask 0.9, the _app_ctx_stack is the correct one,
# before that we need to use the _request_ctx_stack.
try:
from flask import _app_ctx_stack as stack
except ImportError:
from flask import _request_ctx_stack as stack
class FlaskElasticsearch(object):
def __init__(self, app=None):
self.app = app
if app is not None:
self.init_app(app)
def init_app(self, app):
app.config.setdefault('ELASTICSEARCH_HOST', 'localhost:9200')
app.config.setdefault('ELASTICSEARCH_HTTP_AUTH', None)
# Use the newstyle teardown_appcontext if it's available,
# otherwise fall back to the request context
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
def __getattr__(self, item):
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = Elasticsearch(hosts=[ctx.app.config.get('ELASTICSEARCH_HOST')],
http_auth=ctx.app.config.get('ELASTICSEARCH_HTTP_AUTH'))
return getattr(ctx.elasticsearch, item)
def teardown(self, exception):
ctx = stack.top
if hasattr(ctx, 'elasticsearch'):
ctx.elasticsearch = None
|
7ed960ca90b76e8d256a6b94beb0e027ddaad809
|
users/views.py
|
users/views.py
|
from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me':
pk = request.user.pk
self.kwargs = {'pk': pk}
return super().retrieve(request, pk)
|
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
pk = request.user.pk
self.kwargs = {'pk': pk}
return super().retrieve(request, pk)
|
Fix /users/me for anonymous users
|
Fix /users/me for anonymous users
|
Python
|
bsd-3-clause
|
FreeMusicNinja/api.freemusic.ninja
|
from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me':
pk = request.user.pk
self.kwargs = {'pk': pk}
return super().retrieve(request, pk)
Fix /users/me for anonymous users
|
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
pk = request.user.pk
self.kwargs = {'pk': pk}
return super().retrieve(request, pk)
|
<commit_before>from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me':
pk = request.user.pk
self.kwargs = {'pk': pk}
return super().retrieve(request, pk)
<commit_msg>Fix /users/me for anonymous users<commit_after>
|
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
pk = request.user.pk
self.kwargs = {'pk': pk}
return super().retrieve(request, pk)
|
from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me':
pk = request.user.pk
self.kwargs = {'pk': pk}
return super().retrieve(request, pk)
Fix /users/me for anonymous usersfrom rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
pk = request.user.pk
self.kwargs = {'pk': pk}
return super().retrieve(request, pk)
|
<commit_before>from django.shortcuts import redirect
from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me':
pk = request.user.pk
self.kwargs = {'pk': pk}
return super().retrieve(request, pk)
<commit_msg>Fix /users/me for anonymous users<commit_after>from rest_framework import viewsets
from .models import User
from .permissions import IsUserOrReadOnly
from .serializers import AuthenticatedUserSerializer, UserSerializer
class UserViewSet(viewsets.ModelViewSet):
"""API endpoint for viewing and editing users."""
queryset = User.objects.all()
permission_classes = (IsUserOrReadOnly,)
def get_serializer_class(self):
return (AuthenticatedUserSerializer
if self.request.user == self.get_object()
else UserSerializer)
def retrieve(self, request, pk=None):
"""Retrieve given user or current user if ``pk`` is "me"."""
if pk == 'me' and request.user.is_authenticated():
pk = request.user.pk
self.kwargs = {'pk': pk}
return super().retrieve(request, pk)
|
923f86b3cbf21d4c442c61a00ccc801446a542cf
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
author="Riccardo Forina",
author_email="riccardo@forina.me",
maintainer="Riccardo Magliocchetti",
maintainer_email="riccardo.magliocchetti@gmail.com",
name='django-admin-bootstrapped',
version='2.3.6',
description='A Bootstrap theme for Django Admin',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/django-admin-bootstrapped/django-admin-bootstrapped',
license='BSD License',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=[
'setuptools',
'Django>=1.6',
],
test_suite='django_admin_bootstrapped.runtests.runtests',
packages=find_packages(),
include_package_data=True,
zip_safe=False
)
|
from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
author="Riccardo Forina",
author_email="riccardo@forina.me",
maintainer="Riccardo Magliocchetti, Alberto Vara",
maintainer_email="riccardo.magliocchetti@gmail.com, a.vara.1986@gmail.com",
name='django-admin-bootstrapped',
version='2.3.6',
description='A Bootstrap theme for Django Admin for v 1.5',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/django-admin-bootstrapped/django-admin-bootstrapped',
license='BSD License',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=[
'setuptools',
'Django>=1.5',
],
test_suite='django_admin_bootstrapped.runtests.runtests',
packages=find_packages(),
include_package_data=True,
zip_safe=False
)
|
Update Setup.py with my info
|
Update Setup.py with my info
|
Python
|
apache-2.0
|
avara1986/django-admin-bootstrapped,avara1986/django-admin-bootstrapped,avara1986/django-admin-bootstrapped
|
from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
author="Riccardo Forina",
author_email="riccardo@forina.me",
maintainer="Riccardo Magliocchetti",
maintainer_email="riccardo.magliocchetti@gmail.com",
name='django-admin-bootstrapped',
version='2.3.6',
description='A Bootstrap theme for Django Admin',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/django-admin-bootstrapped/django-admin-bootstrapped',
license='BSD License',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=[
'setuptools',
'Django>=1.6',
],
test_suite='django_admin_bootstrapped.runtests.runtests',
packages=find_packages(),
include_package_data=True,
zip_safe=False
)
Update Setup.py with my info
|
from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
author="Riccardo Forina",
author_email="riccardo@forina.me",
maintainer="Riccardo Magliocchetti, Alberto Vara",
maintainer_email="riccardo.magliocchetti@gmail.com, a.vara.1986@gmail.com",
name='django-admin-bootstrapped',
version='2.3.6',
description='A Bootstrap theme for Django Admin for v 1.5',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/django-admin-bootstrapped/django-admin-bootstrapped',
license='BSD License',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=[
'setuptools',
'Django>=1.5',
],
test_suite='django_admin_bootstrapped.runtests.runtests',
packages=find_packages(),
include_package_data=True,
zip_safe=False
)
|
<commit_before>from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
author="Riccardo Forina",
author_email="riccardo@forina.me",
maintainer="Riccardo Magliocchetti",
maintainer_email="riccardo.magliocchetti@gmail.com",
name='django-admin-bootstrapped',
version='2.3.6',
description='A Bootstrap theme for Django Admin',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/django-admin-bootstrapped/django-admin-bootstrapped',
license='BSD License',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=[
'setuptools',
'Django>=1.6',
],
test_suite='django_admin_bootstrapped.runtests.runtests',
packages=find_packages(),
include_package_data=True,
zip_safe=False
)
<commit_msg>Update Setup.py with my info<commit_after>
|
from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
author="Riccardo Forina",
author_email="riccardo@forina.me",
maintainer="Riccardo Magliocchetti, Alberto Vara",
maintainer_email="riccardo.magliocchetti@gmail.com, a.vara.1986@gmail.com",
name='django-admin-bootstrapped',
version='2.3.6',
description='A Bootstrap theme for Django Admin for v 1.5',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/django-admin-bootstrapped/django-admin-bootstrapped',
license='BSD License',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=[
'setuptools',
'Django>=1.5',
],
test_suite='django_admin_bootstrapped.runtests.runtests',
packages=find_packages(),
include_package_data=True,
zip_safe=False
)
|
from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
author="Riccardo Forina",
author_email="riccardo@forina.me",
maintainer="Riccardo Magliocchetti",
maintainer_email="riccardo.magliocchetti@gmail.com",
name='django-admin-bootstrapped',
version='2.3.6',
description='A Bootstrap theme for Django Admin',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/django-admin-bootstrapped/django-admin-bootstrapped',
license='BSD License',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=[
'setuptools',
'Django>=1.6',
],
test_suite='django_admin_bootstrapped.runtests.runtests',
packages=find_packages(),
include_package_data=True,
zip_safe=False
)
Update Setup.py with my infofrom setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
author="Riccardo Forina",
author_email="riccardo@forina.me",
maintainer="Riccardo Magliocchetti, Alberto Vara",
maintainer_email="riccardo.magliocchetti@gmail.com, a.vara.1986@gmail.com",
name='django-admin-bootstrapped',
version='2.3.6',
description='A Bootstrap theme for Django Admin for v 1.5',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/django-admin-bootstrapped/django-admin-bootstrapped',
license='BSD License',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=[
'setuptools',
'Django>=1.5',
],
test_suite='django_admin_bootstrapped.runtests.runtests',
packages=find_packages(),
include_package_data=True,
zip_safe=False
)
|
<commit_before>from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
author="Riccardo Forina",
author_email="riccardo@forina.me",
maintainer="Riccardo Magliocchetti",
maintainer_email="riccardo.magliocchetti@gmail.com",
name='django-admin-bootstrapped',
version='2.3.6',
description='A Bootstrap theme for Django Admin',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/django-admin-bootstrapped/django-admin-bootstrapped',
license='BSD License',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=[
'setuptools',
'Django>=1.6',
],
test_suite='django_admin_bootstrapped.runtests.runtests',
packages=find_packages(),
include_package_data=True,
zip_safe=False
)
<commit_msg>Update Setup.py with my info<commit_after>from setuptools import setup, find_packages
import os
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
setup(
author="Riccardo Forina",
author_email="riccardo@forina.me",
maintainer="Riccardo Magliocchetti, Alberto Vara",
maintainer_email="riccardo.magliocchetti@gmail.com, a.vara.1986@gmail.com",
name='django-admin-bootstrapped',
version='2.3.6',
description='A Bootstrap theme for Django Admin for v 1.5',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
url='https://github.com/django-admin-bootstrapped/django-admin-bootstrapped',
license='BSD License',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
install_requires=[
'setuptools',
'Django>=1.5',
],
test_suite='django_admin_bootstrapped.runtests.runtests',
packages=find_packages(),
include_package_data=True,
zip_safe=False
)
|
270d66fde22e45371491c895fcf2b75dc36adc48
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="SCNIC",
version="0.2.1",
setup_requires=['pytest-runner'],
test_require=['pytest'],
install_requires=["numpy", "scipy", "networkx", "biom-format", "pandas", "fast_sparCC", "scikit-bio"],
scripts=["scripts/SCNIC_analysis.py"],
packages=find_packages(),
description="A tool for finding and summarizing modules of highly correlated observations in compositional data",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
url="https://github.com/shafferm/SCNIC/",
download_url="https://github.com/shafferm/SCNIC/tarball/0.2.1"
)
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="SCNIC",
version="0.2.1",
setup_requires=['pytest-runner'],
test_require=['pytest'],
install_requires=["numpy", "scipy", "networkx", "biom-format", "pandas", "fast_sparCC <= 0.1.6",
"scikit-bio == 0.4.2"],
scripts=["scripts/SCNIC_analysis.py"],
packages=find_packages(),
description="A tool for finding and summarizing modules of highly correlated observations in compositional data",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
url="https://github.com/shafferm/SCNIC/",
download_url="https://github.com/shafferm/SCNIC/tarball/0.2.1"
)
|
Add version requirements for fast_sparCC and scikit-bio
|
Add version requirements for fast_sparCC and scikit-bio
|
Python
|
bsd-3-clause
|
shafferm/SCNIC
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="SCNIC",
version="0.2.1",
setup_requires=['pytest-runner'],
test_require=['pytest'],
install_requires=["numpy", "scipy", "networkx", "biom-format", "pandas", "fast_sparCC", "scikit-bio"],
scripts=["scripts/SCNIC_analysis.py"],
packages=find_packages(),
description="A tool for finding and summarizing modules of highly correlated observations in compositional data",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
url="https://github.com/shafferm/SCNIC/",
download_url="https://github.com/shafferm/SCNIC/tarball/0.2.1"
)
Add version requirements for fast_sparCC and scikit-bio
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="SCNIC",
version="0.2.1",
setup_requires=['pytest-runner'],
test_require=['pytest'],
install_requires=["numpy", "scipy", "networkx", "biom-format", "pandas", "fast_sparCC <= 0.1.6",
"scikit-bio == 0.4.2"],
scripts=["scripts/SCNIC_analysis.py"],
packages=find_packages(),
description="A tool for finding and summarizing modules of highly correlated observations in compositional data",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
url="https://github.com/shafferm/SCNIC/",
download_url="https://github.com/shafferm/SCNIC/tarball/0.2.1"
)
|
<commit_before>from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="SCNIC",
version="0.2.1",
setup_requires=['pytest-runner'],
test_require=['pytest'],
install_requires=["numpy", "scipy", "networkx", "biom-format", "pandas", "fast_sparCC", "scikit-bio"],
scripts=["scripts/SCNIC_analysis.py"],
packages=find_packages(),
description="A tool for finding and summarizing modules of highly correlated observations in compositional data",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
url="https://github.com/shafferm/SCNIC/",
download_url="https://github.com/shafferm/SCNIC/tarball/0.2.1"
)
<commit_msg>Add version requirements for fast_sparCC and scikit-bio<commit_after>
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="SCNIC",
version="0.2.1",
setup_requires=['pytest-runner'],
test_require=['pytest'],
install_requires=["numpy", "scipy", "networkx", "biom-format", "pandas", "fast_sparCC <= 0.1.6",
"scikit-bio == 0.4.2"],
scripts=["scripts/SCNIC_analysis.py"],
packages=find_packages(),
description="A tool for finding and summarizing modules of highly correlated observations in compositional data",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
url="https://github.com/shafferm/SCNIC/",
download_url="https://github.com/shafferm/SCNIC/tarball/0.2.1"
)
|
from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="SCNIC",
version="0.2.1",
setup_requires=['pytest-runner'],
test_require=['pytest'],
install_requires=["numpy", "scipy", "networkx", "biom-format", "pandas", "fast_sparCC", "scikit-bio"],
scripts=["scripts/SCNIC_analysis.py"],
packages=find_packages(),
description="A tool for finding and summarizing modules of highly correlated observations in compositional data",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
url="https://github.com/shafferm/SCNIC/",
download_url="https://github.com/shafferm/SCNIC/tarball/0.2.1"
)
Add version requirements for fast_sparCC and scikit-biofrom setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="SCNIC",
version="0.2.1",
setup_requires=['pytest-runner'],
test_require=['pytest'],
install_requires=["numpy", "scipy", "networkx", "biom-format", "pandas", "fast_sparCC <= 0.1.6",
"scikit-bio == 0.4.2"],
scripts=["scripts/SCNIC_analysis.py"],
packages=find_packages(),
description="A tool for finding and summarizing modules of highly correlated observations in compositional data",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
url="https://github.com/shafferm/SCNIC/",
download_url="https://github.com/shafferm/SCNIC/tarball/0.2.1"
)
|
<commit_before>from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="SCNIC",
version="0.2.1",
setup_requires=['pytest-runner'],
test_require=['pytest'],
install_requires=["numpy", "scipy", "networkx", "biom-format", "pandas", "fast_sparCC", "scikit-bio"],
scripts=["scripts/SCNIC_analysis.py"],
packages=find_packages(),
description="A tool for finding and summarizing modules of highly correlated observations in compositional data",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
url="https://github.com/shafferm/SCNIC/",
download_url="https://github.com/shafferm/SCNIC/tarball/0.2.1"
)
<commit_msg>Add version requirements for fast_sparCC and scikit-bio<commit_after>from setuptools import setup, find_packages
__author__ = 'shafferm'
setup(
name="SCNIC",
version="0.2.1",
setup_requires=['pytest-runner'],
test_require=['pytest'],
install_requires=["numpy", "scipy", "networkx", "biom-format", "pandas", "fast_sparCC <= 0.1.6",
"scikit-bio == 0.4.2"],
scripts=["scripts/SCNIC_analysis.py"],
packages=find_packages(),
description="A tool for finding and summarizing modules of highly correlated observations in compositional data",
author="Michael Shaffer",
author_email='michael.shaffer@ucdenver.edu',
url="https://github.com/shafferm/SCNIC/",
download_url="https://github.com/shafferm/SCNIC/tarball/0.2.1"
)
|
1655edcd359e810b10f7836dc2cbb3f99014d8f6
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup, Extension
packages=['qutepart', 'qutepart/syntax']
package_data={'qutepart/syntax' : ['data/*.xml',
'data/syntax_db.json']
}
extension = Extension('qutepart.syntax.cParser',
sources = ['qutepart/syntax/cParser.c'],
libraries = ['pcre'])
#extension.extra_compile_args = ['-O0', '-g']
setup (name = 'qutepart',
version = '1.0',
description = 'Code editor component for PyQt and PySide',
packages = packages,
package_data = package_data,
ext_modules = [extension])
|
#!/usr/bin/env python
import sys
from distutils.core import setup, Extension
import distutils.ccompiler
packages=['qutepart', 'qutepart/syntax']
package_data={'qutepart/syntax' : ['data/*.xml',
'data/syntax_db.json']
}
extension = Extension('qutepart.syntax.cParser',
sources = ['qutepart/syntax/cParser.c'],
libraries = ['pcre'])
def _checkDependencies():
compiler = distutils.ccompiler.new_compiler()
if not compiler.has_function('pcre_version', includes = ['pcre.h'], libraries = ['pcre']):
print "Failed to find pcre library."
print "\tTry to install libpcre{version}-dev package, or go to http://pcre.org"
print "\tIf not standard directories are used, set CFLAGS and LDFLAGS environment variables"
return False
return True
if 'install' in sys.argv or 'build' in sys.argv or 'build_ext' in sys.argv:
if not '--force' in sys.argv and not '--help' in sys.argv:
if not _checkDependencies():
sys.exit(-1)
setup (name = 'qutepart',
version = '1.0',
description = 'Code editor component for PyQt and PySide',
packages = packages,
package_data = package_data,
ext_modules = [extension])
|
Check for pcre when building
|
Check for pcre when building
|
Python
|
lgpl-2.1
|
Aldenis2112/qutepart,Aldenis2112/qutepart,andreikop/qutepart,hlamer/qutepart,hlamer/qutepart,andreikop/qutepart,andreikop/qutepart,Aldenis2112/qutepart,hlamer/qutepart,hlamer/qutepart,Aldenis2112/qutepart,Aldenis2112/qutepart,hlamer/qutepart,andreikop/qutepart,hlamer/qutepart,andreikop/qutepart,Aldenis2112/qutepart,andreikop/qutepart,Aldenis2112/qutepart,hlamer/qutepart,andreikop/qutepart,Aldenis2112/qutepart,hlamer/qutepart,andreikop/qutepart,Aldenis2112/qutepart,andreikop/qutepart,Aldenis2112/qutepart,andreikop/qutepart,hlamer/qutepart,hlamer/qutepart,hlamer/qutepart,Aldenis2112/qutepart,andreikop/qutepart
|
#!/usr/bin/env python
from distutils.core import setup, Extension
packages=['qutepart', 'qutepart/syntax']
package_data={'qutepart/syntax' : ['data/*.xml',
'data/syntax_db.json']
}
extension = Extension('qutepart.syntax.cParser',
sources = ['qutepart/syntax/cParser.c'],
libraries = ['pcre'])
#extension.extra_compile_args = ['-O0', '-g']
setup (name = 'qutepart',
version = '1.0',
description = 'Code editor component for PyQt and PySide',
packages = packages,
package_data = package_data,
ext_modules = [extension])
Check for pcre when building
|
#!/usr/bin/env python
import sys
from distutils.core import setup, Extension
import distutils.ccompiler
packages=['qutepart', 'qutepart/syntax']
package_data={'qutepart/syntax' : ['data/*.xml',
'data/syntax_db.json']
}
extension = Extension('qutepart.syntax.cParser',
sources = ['qutepart/syntax/cParser.c'],
libraries = ['pcre'])
def _checkDependencies():
compiler = distutils.ccompiler.new_compiler()
if not compiler.has_function('pcre_version', includes = ['pcre.h'], libraries = ['pcre']):
print "Failed to find pcre library."
print "\tTry to install libpcre{version}-dev package, or go to http://pcre.org"
print "\tIf not standard directories are used, set CFLAGS and LDFLAGS environment variables"
return False
return True
if 'install' in sys.argv or 'build' in sys.argv or 'build_ext' in sys.argv:
if not '--force' in sys.argv and not '--help' in sys.argv:
if not _checkDependencies():
sys.exit(-1)
setup (name = 'qutepart',
version = '1.0',
description = 'Code editor component for PyQt and PySide',
packages = packages,
package_data = package_data,
ext_modules = [extension])
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup, Extension
packages=['qutepart', 'qutepart/syntax']
package_data={'qutepart/syntax' : ['data/*.xml',
'data/syntax_db.json']
}
extension = Extension('qutepart.syntax.cParser',
sources = ['qutepart/syntax/cParser.c'],
libraries = ['pcre'])
#extension.extra_compile_args = ['-O0', '-g']
setup (name = 'qutepart',
version = '1.0',
description = 'Code editor component for PyQt and PySide',
packages = packages,
package_data = package_data,
ext_modules = [extension])
<commit_msg>Check for pcre when building<commit_after>
|
#!/usr/bin/env python
import sys
from distutils.core import setup, Extension
import distutils.ccompiler
packages=['qutepart', 'qutepart/syntax']
package_data={'qutepart/syntax' : ['data/*.xml',
'data/syntax_db.json']
}
extension = Extension('qutepart.syntax.cParser',
sources = ['qutepart/syntax/cParser.c'],
libraries = ['pcre'])
def _checkDependencies():
compiler = distutils.ccompiler.new_compiler()
if not compiler.has_function('pcre_version', includes = ['pcre.h'], libraries = ['pcre']):
print "Failed to find pcre library."
print "\tTry to install libpcre{version}-dev package, or go to http://pcre.org"
print "\tIf not standard directories are used, set CFLAGS and LDFLAGS environment variables"
return False
return True
if 'install' in sys.argv or 'build' in sys.argv or 'build_ext' in sys.argv:
if not '--force' in sys.argv and not '--help' in sys.argv:
if not _checkDependencies():
sys.exit(-1)
setup (name = 'qutepart',
version = '1.0',
description = 'Code editor component for PyQt and PySide',
packages = packages,
package_data = package_data,
ext_modules = [extension])
|
#!/usr/bin/env python
from distutils.core import setup, Extension
packages=['qutepart', 'qutepart/syntax']
package_data={'qutepart/syntax' : ['data/*.xml',
'data/syntax_db.json']
}
extension = Extension('qutepart.syntax.cParser',
sources = ['qutepart/syntax/cParser.c'],
libraries = ['pcre'])
#extension.extra_compile_args = ['-O0', '-g']
setup (name = 'qutepart',
version = '1.0',
description = 'Code editor component for PyQt and PySide',
packages = packages,
package_data = package_data,
ext_modules = [extension])
Check for pcre when building#!/usr/bin/env python
import sys
from distutils.core import setup, Extension
import distutils.ccompiler
packages=['qutepart', 'qutepart/syntax']
package_data={'qutepart/syntax' : ['data/*.xml',
'data/syntax_db.json']
}
extension = Extension('qutepart.syntax.cParser',
sources = ['qutepart/syntax/cParser.c'],
libraries = ['pcre'])
def _checkDependencies():
compiler = distutils.ccompiler.new_compiler()
if not compiler.has_function('pcre_version', includes = ['pcre.h'], libraries = ['pcre']):
print "Failed to find pcre library."
print "\tTry to install libpcre{version}-dev package, or go to http://pcre.org"
print "\tIf not standard directories are used, set CFLAGS and LDFLAGS environment variables"
return False
return True
if 'install' in sys.argv or 'build' in sys.argv or 'build_ext' in sys.argv:
if not '--force' in sys.argv and not '--help' in sys.argv:
if not _checkDependencies():
sys.exit(-1)
setup (name = 'qutepart',
version = '1.0',
description = 'Code editor component for PyQt and PySide',
packages = packages,
package_data = package_data,
ext_modules = [extension])
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup, Extension
packages=['qutepart', 'qutepart/syntax']
package_data={'qutepart/syntax' : ['data/*.xml',
'data/syntax_db.json']
}
extension = Extension('qutepart.syntax.cParser',
sources = ['qutepart/syntax/cParser.c'],
libraries = ['pcre'])
#extension.extra_compile_args = ['-O0', '-g']
setup (name = 'qutepart',
version = '1.0',
description = 'Code editor component for PyQt and PySide',
packages = packages,
package_data = package_data,
ext_modules = [extension])
<commit_msg>Check for pcre when building<commit_after>#!/usr/bin/env python
import sys
from distutils.core import setup, Extension
import distutils.ccompiler
packages=['qutepart', 'qutepart/syntax']
package_data={'qutepart/syntax' : ['data/*.xml',
'data/syntax_db.json']
}
extension = Extension('qutepart.syntax.cParser',
sources = ['qutepart/syntax/cParser.c'],
libraries = ['pcre'])
def _checkDependencies():
compiler = distutils.ccompiler.new_compiler()
if not compiler.has_function('pcre_version', includes = ['pcre.h'], libraries = ['pcre']):
print "Failed to find pcre library."
print "\tTry to install libpcre{version}-dev package, or go to http://pcre.org"
print "\tIf not standard directories are used, set CFLAGS and LDFLAGS environment variables"
return False
return True
if 'install' in sys.argv or 'build' in sys.argv or 'build_ext' in sys.argv:
if not '--force' in sys.argv and not '--help' in sys.argv:
if not _checkDependencies():
sys.exit(-1)
setup (name = 'qutepart',
version = '1.0',
description = 'Code editor component for PyQt and PySide',
packages = packages,
package_data = package_data,
ext_modules = [extension])
|
c9100d10b327e98a2fc70e2279198c5f830b7704
|
setup.py
|
setup.py
|
"""
The setup package to install MasterQA requirements
"""
from setuptools import setup, find_packages # noqa
setup(
name='masterqa',
version='1.0.19',
url='http://masterqa.com',
author='Michael Mintz',
author_email='@mintzworld',
maintainer='Michael Mintz',
description='Automation-Assisted Manual Testing - http://masterqa.com',
license='The MIT License',
install_requires=[
'seleniumbase==1.1.63',
'flake8==2.5.4',
],
packages=['masterqa'],
entry_points={
'nose.plugins': []
}
)
|
"""
The setup package to install MasterQA requirements
"""
from setuptools import setup, find_packages # noqa
setup(
name='masterqa',
version='1.0.19',
url='http://masterqa.com',
author='Michael Mintz',
author_email='@mintzworld',
maintainer='Michael Mintz',
description='Automation-Assisted Manual Testing - http://masterqa.com',
license='The MIT License',
install_requires=[
'seleniumbase==1.2.0',
'flake8==2.5.4',
],
packages=['masterqa'],
entry_points={
'nose.plugins': []
}
)
|
Use SeleniumBase 1.2.0 (from 1.1.63)
|
Use SeleniumBase 1.2.0 (from 1.1.63)
|
Python
|
mit
|
masterqa/MasterQA,mdmintz/MasterQA
|
"""
The setup package to install MasterQA requirements
"""
from setuptools import setup, find_packages # noqa
setup(
name='masterqa',
version='1.0.19',
url='http://masterqa.com',
author='Michael Mintz',
author_email='@mintzworld',
maintainer='Michael Mintz',
description='Automation-Assisted Manual Testing - http://masterqa.com',
license='The MIT License',
install_requires=[
'seleniumbase==1.1.63',
'flake8==2.5.4',
],
packages=['masterqa'],
entry_points={
'nose.plugins': []
}
)
Use SeleniumBase 1.2.0 (from 1.1.63)
|
"""
The setup package to install MasterQA requirements
"""
from setuptools import setup, find_packages # noqa
setup(
name='masterqa',
version='1.0.19',
url='http://masterqa.com',
author='Michael Mintz',
author_email='@mintzworld',
maintainer='Michael Mintz',
description='Automation-Assisted Manual Testing - http://masterqa.com',
license='The MIT License',
install_requires=[
'seleniumbase==1.2.0',
'flake8==2.5.4',
],
packages=['masterqa'],
entry_points={
'nose.plugins': []
}
)
|
<commit_before>"""
The setup package to install MasterQA requirements
"""
from setuptools import setup, find_packages # noqa
setup(
name='masterqa',
version='1.0.19',
url='http://masterqa.com',
author='Michael Mintz',
author_email='@mintzworld',
maintainer='Michael Mintz',
description='Automation-Assisted Manual Testing - http://masterqa.com',
license='The MIT License',
install_requires=[
'seleniumbase==1.1.63',
'flake8==2.5.4',
],
packages=['masterqa'],
entry_points={
'nose.plugins': []
}
)
<commit_msg>Use SeleniumBase 1.2.0 (from 1.1.63)<commit_after>
|
"""
The setup package to install MasterQA requirements
"""
from setuptools import setup, find_packages # noqa
setup(
name='masterqa',
version='1.0.19',
url='http://masterqa.com',
author='Michael Mintz',
author_email='@mintzworld',
maintainer='Michael Mintz',
description='Automation-Assisted Manual Testing - http://masterqa.com',
license='The MIT License',
install_requires=[
'seleniumbase==1.2.0',
'flake8==2.5.4',
],
packages=['masterqa'],
entry_points={
'nose.plugins': []
}
)
|
"""
The setup package to install MasterQA requirements
"""
from setuptools import setup, find_packages # noqa
setup(
name='masterqa',
version='1.0.19',
url='http://masterqa.com',
author='Michael Mintz',
author_email='@mintzworld',
maintainer='Michael Mintz',
description='Automation-Assisted Manual Testing - http://masterqa.com',
license='The MIT License',
install_requires=[
'seleniumbase==1.1.63',
'flake8==2.5.4',
],
packages=['masterqa'],
entry_points={
'nose.plugins': []
}
)
Use SeleniumBase 1.2.0 (from 1.1.63)"""
The setup package to install MasterQA requirements
"""
from setuptools import setup, find_packages # noqa
setup(
name='masterqa',
version='1.0.19',
url='http://masterqa.com',
author='Michael Mintz',
author_email='@mintzworld',
maintainer='Michael Mintz',
description='Automation-Assisted Manual Testing - http://masterqa.com',
license='The MIT License',
install_requires=[
'seleniumbase==1.2.0',
'flake8==2.5.4',
],
packages=['masterqa'],
entry_points={
'nose.plugins': []
}
)
|
<commit_before>"""
The setup package to install MasterQA requirements
"""
from setuptools import setup, find_packages # noqa
setup(
name='masterqa',
version='1.0.19',
url='http://masterqa.com',
author='Michael Mintz',
author_email='@mintzworld',
maintainer='Michael Mintz',
description='Automation-Assisted Manual Testing - http://masterqa.com',
license='The MIT License',
install_requires=[
'seleniumbase==1.1.63',
'flake8==2.5.4',
],
packages=['masterqa'],
entry_points={
'nose.plugins': []
}
)
<commit_msg>Use SeleniumBase 1.2.0 (from 1.1.63)<commit_after>"""
The setup package to install MasterQA requirements
"""
from setuptools import setup, find_packages # noqa
setup(
name='masterqa',
version='1.0.19',
url='http://masterqa.com',
author='Michael Mintz',
author_email='@mintzworld',
maintainer='Michael Mintz',
description='Automation-Assisted Manual Testing - http://masterqa.com',
license='The MIT License',
install_requires=[
'seleniumbase==1.2.0',
'flake8==2.5.4',
],
packages=['masterqa'],
entry_points={
'nose.plugins': []
}
)
|
920be9e335c5679461208da88d85b38a695b919b
|
setup.py
|
setup.py
|
"""
Just a regular `setup.py` file.
@author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dsawl',
version='0.1',
description='A set of tools for machine learning',
long_description=long_description,
url='https://github.com/Nikolay-Lysenko/dsawl',
author='Nikolay Lysenko',
author_email='nikolay.lysenko.1992@gmail.com',
license='MIT',
keywords='active_learning categorical_features feature_engineering',
packages=find_packages(exclude=['docs', 'tests', 'ci']),
python_requires='>=3.5',
install_requires=['numpy', 'pandas', 'scipy', 'scikit-learn', 'joblib']
)
|
"""
Just a regular `setup.py` file.
@author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dsawl',
version='0.0.1',
description='A set of tools for machine learning',
long_description=long_description,
url='https://github.com/Nikolay-Lysenko/dsawl',
author='Nikolay Lysenko',
author_email='nikolay.lysenko.1992@gmail.com',
license='MIT',
keywords='active_learning categorical_features feature_engineering',
packages=find_packages(exclude=['docs', 'tests', 'ci']),
python_requires='>=3.5',
install_requires=['numpy', 'pandas', 'scipy', 'scikit-learn', 'joblib']
)
|
Change version of the package for a trial upload to PyPI
|
ci: Change version of the package for a trial upload to PyPI
|
Python
|
mit
|
Nikolay-Lysenko/dsawl
|
"""
Just a regular `setup.py` file.
@author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dsawl',
version='0.1',
description='A set of tools for machine learning',
long_description=long_description,
url='https://github.com/Nikolay-Lysenko/dsawl',
author='Nikolay Lysenko',
author_email='nikolay.lysenko.1992@gmail.com',
license='MIT',
keywords='active_learning categorical_features feature_engineering',
packages=find_packages(exclude=['docs', 'tests', 'ci']),
python_requires='>=3.5',
install_requires=['numpy', 'pandas', 'scipy', 'scikit-learn', 'joblib']
)
ci: Change version of the package for a trial upload to PyPI
|
"""
Just a regular `setup.py` file.
@author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dsawl',
version='0.0.1',
description='A set of tools for machine learning',
long_description=long_description,
url='https://github.com/Nikolay-Lysenko/dsawl',
author='Nikolay Lysenko',
author_email='nikolay.lysenko.1992@gmail.com',
license='MIT',
keywords='active_learning categorical_features feature_engineering',
packages=find_packages(exclude=['docs', 'tests', 'ci']),
python_requires='>=3.5',
install_requires=['numpy', 'pandas', 'scipy', 'scikit-learn', 'joblib']
)
|
<commit_before>"""
Just a regular `setup.py` file.
@author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dsawl',
version='0.1',
description='A set of tools for machine learning',
long_description=long_description,
url='https://github.com/Nikolay-Lysenko/dsawl',
author='Nikolay Lysenko',
author_email='nikolay.lysenko.1992@gmail.com',
license='MIT',
keywords='active_learning categorical_features feature_engineering',
packages=find_packages(exclude=['docs', 'tests', 'ci']),
python_requires='>=3.5',
install_requires=['numpy', 'pandas', 'scipy', 'scikit-learn', 'joblib']
)
<commit_msg>ci: Change version of the package for a trial upload to PyPI<commit_after>
|
"""
Just a regular `setup.py` file.
@author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dsawl',
version='0.0.1',
description='A set of tools for machine learning',
long_description=long_description,
url='https://github.com/Nikolay-Lysenko/dsawl',
author='Nikolay Lysenko',
author_email='nikolay.lysenko.1992@gmail.com',
license='MIT',
keywords='active_learning categorical_features feature_engineering',
packages=find_packages(exclude=['docs', 'tests', 'ci']),
python_requires='>=3.5',
install_requires=['numpy', 'pandas', 'scipy', 'scikit-learn', 'joblib']
)
|
"""
Just a regular `setup.py` file.
@author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dsawl',
version='0.1',
description='A set of tools for machine learning',
long_description=long_description,
url='https://github.com/Nikolay-Lysenko/dsawl',
author='Nikolay Lysenko',
author_email='nikolay.lysenko.1992@gmail.com',
license='MIT',
keywords='active_learning categorical_features feature_engineering',
packages=find_packages(exclude=['docs', 'tests', 'ci']),
python_requires='>=3.5',
install_requires=['numpy', 'pandas', 'scipy', 'scikit-learn', 'joblib']
)
ci: Change version of the package for a trial upload to PyPI"""
Just a regular `setup.py` file.
@author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dsawl',
version='0.0.1',
description='A set of tools for machine learning',
long_description=long_description,
url='https://github.com/Nikolay-Lysenko/dsawl',
author='Nikolay Lysenko',
author_email='nikolay.lysenko.1992@gmail.com',
license='MIT',
keywords='active_learning categorical_features feature_engineering',
packages=find_packages(exclude=['docs', 'tests', 'ci']),
python_requires='>=3.5',
install_requires=['numpy', 'pandas', 'scipy', 'scikit-learn', 'joblib']
)
|
<commit_before>"""
Just a regular `setup.py` file.
@author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dsawl',
version='0.1',
description='A set of tools for machine learning',
long_description=long_description,
url='https://github.com/Nikolay-Lysenko/dsawl',
author='Nikolay Lysenko',
author_email='nikolay.lysenko.1992@gmail.com',
license='MIT',
keywords='active_learning categorical_features feature_engineering',
packages=find_packages(exclude=['docs', 'tests', 'ci']),
python_requires='>=3.5',
install_requires=['numpy', 'pandas', 'scipy', 'scikit-learn', 'joblib']
)
<commit_msg>ci: Change version of the package for a trial upload to PyPI<commit_after>"""
Just a regular `setup.py` file.
@author: Nikolay Lysenko
"""
import os
from setuptools import setup, find_packages
current_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(current_dir, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dsawl',
version='0.0.1',
description='A set of tools for machine learning',
long_description=long_description,
url='https://github.com/Nikolay-Lysenko/dsawl',
author='Nikolay Lysenko',
author_email='nikolay.lysenko.1992@gmail.com',
license='MIT',
keywords='active_learning categorical_features feature_engineering',
packages=find_packages(exclude=['docs', 'tests', 'ci']),
python_requires='>=3.5',
install_requires=['numpy', 'pandas', 'scipy', 'scikit-learn', 'joblib']
)
|
eb76ad5309bf5c150820f159a803f26d7790ef9c
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
VERSION = (0, 4, 0)
setup(
name='django-generic-aggregation',
version=".".join(map(str, VERSION)),
description='annotate() and aggregate() for generically related data',
long_description=readme,
author='Charles Leifer',
author_email='coleifer@gmail.com',
url='http://github.com/coleifer/django-generic-aggregation/',
packages=find_packages(),
package_data = {
'generic_aggregation': [
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
|
import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
VERSION = (0, 4, 0)
setup(
name='django-generic-aggregation',
version=".".join(map(str, VERSION)),
description='annotate() and aggregate() for generically related data',
long_description=readme,
author='Charles Leifer',
author_email='coleifer@gmail.com',
url='http://github.com/coleifer/django-generic-aggregation/',
packages=find_packages(),
package_data = {
'generic_aggregation': [
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
|
Update trove classifiers for python 3.
|
Update trove classifiers for python 3.
|
Python
|
mit
|
coleifer/django-generic-aggregation
|
import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
VERSION = (0, 4, 0)
setup(
name='django-generic-aggregation',
version=".".join(map(str, VERSION)),
description='annotate() and aggregate() for generically related data',
long_description=readme,
author='Charles Leifer',
author_email='coleifer@gmail.com',
url='http://github.com/coleifer/django-generic-aggregation/',
packages=find_packages(),
package_data = {
'generic_aggregation': [
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
Update trove classifiers for python 3.
|
import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
VERSION = (0, 4, 0)
setup(
name='django-generic-aggregation',
version=".".join(map(str, VERSION)),
description='annotate() and aggregate() for generically related data',
long_description=readme,
author='Charles Leifer',
author_email='coleifer@gmail.com',
url='http://github.com/coleifer/django-generic-aggregation/',
packages=find_packages(),
package_data = {
'generic_aggregation': [
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
|
<commit_before>import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
VERSION = (0, 4, 0)
setup(
name='django-generic-aggregation',
version=".".join(map(str, VERSION)),
description='annotate() and aggregate() for generically related data',
long_description=readme,
author='Charles Leifer',
author_email='coleifer@gmail.com',
url='http://github.com/coleifer/django-generic-aggregation/',
packages=find_packages(),
package_data = {
'generic_aggregation': [
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
<commit_msg>Update trove classifiers for python 3.<commit_after>
|
import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
VERSION = (0, 4, 0)
setup(
name='django-generic-aggregation',
version=".".join(map(str, VERSION)),
description='annotate() and aggregate() for generically related data',
long_description=readme,
author='Charles Leifer',
author_email='coleifer@gmail.com',
url='http://github.com/coleifer/django-generic-aggregation/',
packages=find_packages(),
package_data = {
'generic_aggregation': [
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
|
import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
VERSION = (0, 4, 0)
setup(
name='django-generic-aggregation',
version=".".join(map(str, VERSION)),
description='annotate() and aggregate() for generically related data',
long_description=readme,
author='Charles Leifer',
author_email='coleifer@gmail.com',
url='http://github.com/coleifer/django-generic-aggregation/',
packages=find_packages(),
package_data = {
'generic_aggregation': [
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
Update trove classifiers for python 3.import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
VERSION = (0, 4, 0)
setup(
name='django-generic-aggregation',
version=".".join(map(str, VERSION)),
description='annotate() and aggregate() for generically related data',
long_description=readme,
author='Charles Leifer',
author_email='coleifer@gmail.com',
url='http://github.com/coleifer/django-generic-aggregation/',
packages=find_packages(),
package_data = {
'generic_aggregation': [
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
|
<commit_before>import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
VERSION = (0, 4, 0)
setup(
name='django-generic-aggregation',
version=".".join(map(str, VERSION)),
description='annotate() and aggregate() for generically related data',
long_description=readme,
author='Charles Leifer',
author_email='coleifer@gmail.com',
url='http://github.com/coleifer/django-generic-aggregation/',
packages=find_packages(),
package_data = {
'generic_aggregation': [
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
<commit_msg>Update trove classifiers for python 3.<commit_after>import os
from setuptools import setup, find_packages
f = open(os.path.join(os.path.dirname(__file__), 'README.rst'))
readme = f.read()
f.close()
VERSION = (0, 4, 0)
setup(
name='django-generic-aggregation',
version=".".join(map(str, VERSION)),
description='annotate() and aggregate() for generically related data',
long_description=readme,
author='Charles Leifer',
author_email='coleifer@gmail.com',
url='http://github.com/coleifer/django-generic-aggregation/',
packages=find_packages(),
package_data = {
'generic_aggregation': [
],
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Framework :: Django',
],
test_suite='runtests.runtests',
)
|
2e08d602ca6ac1c4ae32d84d9013fa36f6f45f9f
|
setup.py
|
setup.py
|
#!/usr/bin/python
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github',
'dev'+date.today().isoformat().replace('-', '')),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Partially supported protocols: IRC, Finger
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat'],
install_requires=['SocksipyChain >= 2.0.9']
)
|
#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github', 'dev%d' % time.time()),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Partially supported protocols: IRC, Finger
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat'],
install_requires=['SocksipyChain >= 2.0.9']
)
|
Change dev package time stamp
|
Change dev package time stamp
|
Python
|
agpl-3.0
|
output/PyPagekite,lyoshenka/PyPagekite,pagekite/PyPagekite,lyoshenka/PyPagekite,pagekite/PyPagekite,output/PyPagekite,output/PyPagekite,lyoshenka/PyPagekite,pagekite/PyPagekite
|
#!/usr/bin/python
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github',
'dev'+date.today().isoformat().replace('-', '')),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Partially supported protocols: IRC, Finger
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat'],
install_requires=['SocksipyChain >= 2.0.9']
)
Change dev package time stamp
|
#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github', 'dev%d' % time.time()),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Partially supported protocols: IRC, Finger
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat'],
install_requires=['SocksipyChain >= 2.0.9']
)
|
<commit_before>#!/usr/bin/python
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github',
'dev'+date.today().isoformat().replace('-', '')),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Partially supported protocols: IRC, Finger
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat'],
install_requires=['SocksipyChain >= 2.0.9']
)
<commit_msg>Change dev package time stamp<commit_after>
|
#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github', 'dev%d' % time.time()),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Partially supported protocols: IRC, Finger
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat'],
install_requires=['SocksipyChain >= 2.0.9']
)
|
#!/usr/bin/python
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github',
'dev'+date.today().isoformat().replace('-', '')),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Partially supported protocols: IRC, Finger
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat'],
install_requires=['SocksipyChain >= 2.0.9']
)
Change dev package time stamp#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github', 'dev%d' % time.time()),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Partially supported protocols: IRC, Finger
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat'],
install_requires=['SocksipyChain >= 2.0.9']
)
|
<commit_before>#!/usr/bin/python
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github',
'dev'+date.today().isoformat().replace('-', '')),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Partially supported protocols: IRC, Finger
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat'],
install_requires=['SocksipyChain >= 2.0.9']
)
<commit_msg>Change dev package time stamp<commit_after>#!/usr/bin/python
import time
from datetime import date
from setuptools import setup
from pagekite.common import APPVER
import os
try:
# This borks sdist.
os.remove('.SELF')
except:
pass
setup(
name="pagekite",
version=APPVER.replace('github', 'dev%d' % time.time()),
license="AGPLv3+",
author="Bjarni R. Einarsson",
author_email="bre@pagekite.net",
url="http://pagekite.org/",
description="""PageKite makes localhost servers visible to the world.""",
long_description="""\
PageKite is a system for running publicly visible servers (generally
web servers) on machines without a direct connection to the Internet,
such as mobile devices or computers behind restrictive firewalls.
PageKite works around NAT, firewalls and IP-address limitations by
using a combination of tunnels and reverse proxies.
Natively supported protocols: HTTP, HTTPS
Partially supported protocols: IRC, Finger
Any other TCP-based service, including SSH and VNC, may be exposed
as well to clients supporting HTTP Proxies.
""",
packages=['pagekite', 'pagekite.ui', 'pagekite.proto'],
scripts=['scripts/pagekite', 'scripts/lapcat'],
install_requires=['SocksipyChain >= 2.0.9']
)
|
c78e7d4ca37936fd1e539bf83bb9bfdc24d2568f
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '0.6'
REQUIREMENTS = ['requests']
CLASSIFIERS = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
]
setup(
name="python-amazon-mws",
version=version,
description="A python interface for Amazon MWS",
author="Paulo Alvarado",
author_email="commonzenpython@gmail.com",
url="http://github.com/czpython/python-amazon-mws",
packages=find_packages(),
platforms=['OS Independent'],
license='LICENSE.txt',
install_requires=REQUIREMENTS,
classifiers=CLASSIFIERS,
include_package_data=True,
zip_safe=False
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '0.6.1'
REQUIREMENTS = ['requests']
CLASSIFIERS = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
"Programming Language :: Python :: 3.4",
#"Programming Language :: Python :: 2",
#"Programming Language :: Python :: 2.6",
#"Programming Language :: Python :: 2.7",
]
setup(
name="python-amazon-mws",
version=version,
description="A python interface for Amazon MWS",
author="Paulo Alvarado",
author_email="commonzenpython@gmail.com",
url="http://github.com/czpython/python-amazon-mws",
packages=find_packages(),
platforms=['OS Independent'],
license='LICENSE.txt',
install_requires=REQUIREMENTS,
classifiers=CLASSIFIERS,
include_package_data=True,
zip_safe=False
)
|
Adjust Setup: using Python 3.4, not Python 2 supported (currently).
|
Adjust Setup: using Python 3.4, not Python 2 supported (currently).
|
Python
|
unlicense
|
BigFlySports/python-amazon-mws
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '0.6'
REQUIREMENTS = ['requests']
CLASSIFIERS = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
]
setup(
name="python-amazon-mws",
version=version,
description="A python interface for Amazon MWS",
author="Paulo Alvarado",
author_email="commonzenpython@gmail.com",
url="http://github.com/czpython/python-amazon-mws",
packages=find_packages(),
platforms=['OS Independent'],
license='LICENSE.txt',
install_requires=REQUIREMENTS,
classifiers=CLASSIFIERS,
include_package_data=True,
zip_safe=False
)
Adjust Setup: using Python 3.4, not Python 2 supported (currently).
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '0.6.1'
REQUIREMENTS = ['requests']
CLASSIFIERS = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
"Programming Language :: Python :: 3.4",
#"Programming Language :: Python :: 2",
#"Programming Language :: Python :: 2.6",
#"Programming Language :: Python :: 2.7",
]
setup(
name="python-amazon-mws",
version=version,
description="A python interface for Amazon MWS",
author="Paulo Alvarado",
author_email="commonzenpython@gmail.com",
url="http://github.com/czpython/python-amazon-mws",
packages=find_packages(),
platforms=['OS Independent'],
license='LICENSE.txt',
install_requires=REQUIREMENTS,
classifiers=CLASSIFIERS,
include_package_data=True,
zip_safe=False
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '0.6'
REQUIREMENTS = ['requests']
CLASSIFIERS = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
]
setup(
name="python-amazon-mws",
version=version,
description="A python interface for Amazon MWS",
author="Paulo Alvarado",
author_email="commonzenpython@gmail.com",
url="http://github.com/czpython/python-amazon-mws",
packages=find_packages(),
platforms=['OS Independent'],
license='LICENSE.txt',
install_requires=REQUIREMENTS,
classifiers=CLASSIFIERS,
include_package_data=True,
zip_safe=False
)
<commit_msg>Adjust Setup: using Python 3.4, not Python 2 supported (currently).<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '0.6.1'
REQUIREMENTS = ['requests']
CLASSIFIERS = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
"Programming Language :: Python :: 3.4",
#"Programming Language :: Python :: 2",
#"Programming Language :: Python :: 2.6",
#"Programming Language :: Python :: 2.7",
]
setup(
name="python-amazon-mws",
version=version,
description="A python interface for Amazon MWS",
author="Paulo Alvarado",
author_email="commonzenpython@gmail.com",
url="http://github.com/czpython/python-amazon-mws",
packages=find_packages(),
platforms=['OS Independent'],
license='LICENSE.txt',
install_requires=REQUIREMENTS,
classifiers=CLASSIFIERS,
include_package_data=True,
zip_safe=False
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '0.6'
REQUIREMENTS = ['requests']
CLASSIFIERS = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
]
setup(
name="python-amazon-mws",
version=version,
description="A python interface for Amazon MWS",
author="Paulo Alvarado",
author_email="commonzenpython@gmail.com",
url="http://github.com/czpython/python-amazon-mws",
packages=find_packages(),
platforms=['OS Independent'],
license='LICENSE.txt',
install_requires=REQUIREMENTS,
classifiers=CLASSIFIERS,
include_package_data=True,
zip_safe=False
)
Adjust Setup: using Python 3.4, not Python 2 supported (currently).# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '0.6.1'
REQUIREMENTS = ['requests']
CLASSIFIERS = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
"Programming Language :: Python :: 3.4",
#"Programming Language :: Python :: 2",
#"Programming Language :: Python :: 2.6",
#"Programming Language :: Python :: 2.7",
]
setup(
name="python-amazon-mws",
version=version,
description="A python interface for Amazon MWS",
author="Paulo Alvarado",
author_email="commonzenpython@gmail.com",
url="http://github.com/czpython/python-amazon-mws",
packages=find_packages(),
platforms=['OS Independent'],
license='LICENSE.txt',
install_requires=REQUIREMENTS,
classifiers=CLASSIFIERS,
include_package_data=True,
zip_safe=False
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '0.6'
REQUIREMENTS = ['requests']
CLASSIFIERS = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
]
setup(
name="python-amazon-mws",
version=version,
description="A python interface for Amazon MWS",
author="Paulo Alvarado",
author_email="commonzenpython@gmail.com",
url="http://github.com/czpython/python-amazon-mws",
packages=find_packages(),
platforms=['OS Independent'],
license='LICENSE.txt',
install_requires=REQUIREMENTS,
classifiers=CLASSIFIERS,
include_package_data=True,
zip_safe=False
)
<commit_msg>Adjust Setup: using Python 3.4, not Python 2 supported (currently).<commit_after># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
version = '0.6.1'
REQUIREMENTS = ['requests']
CLASSIFIERS = [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
"Programming Language :: Python :: 3.4",
#"Programming Language :: Python :: 2",
#"Programming Language :: Python :: 2.6",
#"Programming Language :: Python :: 2.7",
]
setup(
name="python-amazon-mws",
version=version,
description="A python interface for Amazon MWS",
author="Paulo Alvarado",
author_email="commonzenpython@gmail.com",
url="http://github.com/czpython/python-amazon-mws",
packages=find_packages(),
platforms=['OS Independent'],
license='LICENSE.txt',
install_requires=REQUIREMENTS,
classifiers=CLASSIFIERS,
include_package_data=True,
zip_safe=False
)
|
0b9c9f20b8ba0f4920dbbea860e69e7e09570b88
|
setup.py
|
setup.py
|
import scraper
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = scraper.__version__
setup(
name='django-scraper',
version=version,
description='Django application which crawls and downloads online content'
' following instructions',
long_description=open('README.rst').read(),
license='The MIT License (MIT)',
url='https://github.com/zniper/django-scraper',
author='Ha Pham',
author_email='me@zniper.net',
packages=['scraper', 'scraper.management.commands', 'scraper.migrations', 'tests'],
keywords='crawl scraper spider',
install_requires=[
'requests',
'lxml',
],
)
|
import scraper
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = scraper.__version__
setup(
name='django-scraper',
version=version,
description='Django application which crawls and downloads online content'
' following instructions',
long_description=open('README.rst').read(),
license='The MIT License (MIT)',
url='https://github.com/zniper/django-scraper',
author='Ha Pham',
author_email='me@zniper.net',
packages=['scraper', 'scraper.management.commands', 'scraper.migrations'],
keywords='crawl scraper spider',
install_requires=[
'requests',
'lxml',
],
)
|
Remove tests module from root of package
|
Remove tests module from root of package
|
Python
|
mit
|
zniper/django-scraper,zniper/django-scraper
|
import scraper
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = scraper.__version__
setup(
name='django-scraper',
version=version,
description='Django application which crawls and downloads online content'
' following instructions',
long_description=open('README.rst').read(),
license='The MIT License (MIT)',
url='https://github.com/zniper/django-scraper',
author='Ha Pham',
author_email='me@zniper.net',
packages=['scraper', 'scraper.management.commands', 'scraper.migrations', 'tests'],
keywords='crawl scraper spider',
install_requires=[
'requests',
'lxml',
],
)
Remove tests module from root of package
|
import scraper
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = scraper.__version__
setup(
name='django-scraper',
version=version,
description='Django application which crawls and downloads online content'
' following instructions',
long_description=open('README.rst').read(),
license='The MIT License (MIT)',
url='https://github.com/zniper/django-scraper',
author='Ha Pham',
author_email='me@zniper.net',
packages=['scraper', 'scraper.management.commands', 'scraper.migrations'],
keywords='crawl scraper spider',
install_requires=[
'requests',
'lxml',
],
)
|
<commit_before>import scraper
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = scraper.__version__
setup(
name='django-scraper',
version=version,
description='Django application which crawls and downloads online content'
' following instructions',
long_description=open('README.rst').read(),
license='The MIT License (MIT)',
url='https://github.com/zniper/django-scraper',
author='Ha Pham',
author_email='me@zniper.net',
packages=['scraper', 'scraper.management.commands', 'scraper.migrations', 'tests'],
keywords='crawl scraper spider',
install_requires=[
'requests',
'lxml',
],
)
<commit_msg>Remove tests module from root of package<commit_after>
|
import scraper
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = scraper.__version__
setup(
name='django-scraper',
version=version,
description='Django application which crawls and downloads online content'
' following instructions',
long_description=open('README.rst').read(),
license='The MIT License (MIT)',
url='https://github.com/zniper/django-scraper',
author='Ha Pham',
author_email='me@zniper.net',
packages=['scraper', 'scraper.management.commands', 'scraper.migrations'],
keywords='crawl scraper spider',
install_requires=[
'requests',
'lxml',
],
)
|
import scraper
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = scraper.__version__
setup(
name='django-scraper',
version=version,
description='Django application which crawls and downloads online content'
' following instructions',
long_description=open('README.rst').read(),
license='The MIT License (MIT)',
url='https://github.com/zniper/django-scraper',
author='Ha Pham',
author_email='me@zniper.net',
packages=['scraper', 'scraper.management.commands', 'scraper.migrations', 'tests'],
keywords='crawl scraper spider',
install_requires=[
'requests',
'lxml',
],
)
Remove tests module from root of packageimport scraper
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = scraper.__version__
setup(
name='django-scraper',
version=version,
description='Django application which crawls and downloads online content'
' following instructions',
long_description=open('README.rst').read(),
license='The MIT License (MIT)',
url='https://github.com/zniper/django-scraper',
author='Ha Pham',
author_email='me@zniper.net',
packages=['scraper', 'scraper.management.commands', 'scraper.migrations'],
keywords='crawl scraper spider',
install_requires=[
'requests',
'lxml',
],
)
|
<commit_before>import scraper
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = scraper.__version__
setup(
name='django-scraper',
version=version,
description='Django application which crawls and downloads online content'
' following instructions',
long_description=open('README.rst').read(),
license='The MIT License (MIT)',
url='https://github.com/zniper/django-scraper',
author='Ha Pham',
author_email='me@zniper.net',
packages=['scraper', 'scraper.management.commands', 'scraper.migrations', 'tests'],
keywords='crawl scraper spider',
install_requires=[
'requests',
'lxml',
],
)
<commit_msg>Remove tests module from root of package<commit_after>import scraper
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = scraper.__version__
setup(
name='django-scraper',
version=version,
description='Django application which crawls and downloads online content'
' following instructions',
long_description=open('README.rst').read(),
license='The MIT License (MIT)',
url='https://github.com/zniper/django-scraper',
author='Ha Pham',
author_email='me@zniper.net',
packages=['scraper', 'scraper.management.commands', 'scraper.migrations'],
keywords='crawl scraper spider',
install_requires=[
'requests',
'lxml',
],
)
|
1efeed053332d8f200093006fb50d2dc396e4c43
|
setup.py
|
setup.py
|
"""
Benchmarking for the ISCC library.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'isbnlib', 'pymarc']
setup(
name='isccbench',
version='0.1.0',
license='BSD',
description='Benchmarking for the ISCC library.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'iscc-bench = iscc_bench.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
"""
Benchmarking for the ISCC library.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'isbnlib', 'pymarc', 'sqlitedict', 'lxml']
setup(
name='isccbench',
version='0.1.0',
license='BSD',
description='Benchmarking for the ISCC library.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'iscc-bench = iscc_bench.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
Add lxml and sqlitedict to dependencies
|
Add lxml and sqlitedict to dependencies
|
Python
|
bsd-2-clause
|
coblo/isccbench
|
"""
Benchmarking for the ISCC library.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'isbnlib', 'pymarc']
setup(
name='isccbench',
version='0.1.0',
license='BSD',
description='Benchmarking for the ISCC library.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'iscc-bench = iscc_bench.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Add lxml and sqlitedict to dependencies
|
"""
Benchmarking for the ISCC library.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'isbnlib', 'pymarc', 'sqlitedict', 'lxml']
setup(
name='isccbench',
version='0.1.0',
license='BSD',
description='Benchmarking for the ISCC library.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'iscc-bench = iscc_bench.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>"""
Benchmarking for the ISCC library.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'isbnlib', 'pymarc']
setup(
name='isccbench',
version='0.1.0',
license='BSD',
description='Benchmarking for the ISCC library.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'iscc-bench = iscc_bench.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Add lxml and sqlitedict to dependencies<commit_after>
|
"""
Benchmarking for the ISCC library.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'isbnlib', 'pymarc', 'sqlitedict', 'lxml']
setup(
name='isccbench',
version='0.1.0',
license='BSD',
description='Benchmarking for the ISCC library.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'iscc-bench = iscc_bench.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
"""
Benchmarking for the ISCC library.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'isbnlib', 'pymarc']
setup(
name='isccbench',
version='0.1.0',
license='BSD',
description='Benchmarking for the ISCC library.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'iscc-bench = iscc_bench.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
Add lxml and sqlitedict to dependencies"""
Benchmarking for the ISCC library.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'isbnlib', 'pymarc', 'sqlitedict', 'lxml']
setup(
name='isccbench',
version='0.1.0',
license='BSD',
description='Benchmarking for the ISCC library.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'iscc-bench = iscc_bench.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
<commit_before>"""
Benchmarking for the ISCC library.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'isbnlib', 'pymarc']
setup(
name='isccbench',
version='0.1.0',
license='BSD',
description='Benchmarking for the ISCC library.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'iscc-bench = iscc_bench.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
<commit_msg>Add lxml and sqlitedict to dependencies<commit_after>"""
Benchmarking for the ISCC library.
"""
from setuptools import find_packages, setup
dependencies = ['click', 'isbnlib', 'pymarc', 'sqlitedict', 'lxml']
setup(
name='isccbench',
version='0.1.0',
license='BSD',
description='Benchmarking for the ISCC library.',
long_description=__doc__,
packages=find_packages(exclude=['tests']),
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=dependencies,
entry_points={
'console_scripts': [
'iscc-bench = iscc_bench.cli:main',
],
},
classifiers=[
# As from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: MacOS',
'Operating System :: Unix',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
cd20dabe9809d4448ea679506bce63786ec3e579
|
setup.py
|
setup.py
|
import os
from setuptools import setup
longDesc = ""
if os.path.exists("README.rst"):
longDesc = open("README.rst").read().strip()
setup(
name = "pytesseract",
version = "0.1.6",
author = "Samuel Hoffstaetter",
author_email="pytesseract@madmaze.net",
maintainer = "Matthias Lee",
maintainer_email = "pytesseract@madmaze.net",
description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
long_description = longDesc,
license = "GPLv3",
keywords = "python-tesseract OCR Python",
url = "https://github.com/madmaze/python-tesseract",
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
package_data = {'pytesseract': ['*.png','*.jpg']},
entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
|
import os
from setuptools import setup
longDesc = ""
if os.path.exists("README.rst"):
longDesc = open("README.rst").read().strip()
setup(
name = "pytesseract",
version = "0.1.7",
author = "Samuel Hoffstaetter",
author_email="pytesseract@madmaze.net",
maintainer = "Matthias Lee",
maintainer_email = "pytesseract@madmaze.net",
description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
long_description = longDesc,
license = "GPLv3",
keywords = "python-tesseract OCR Python",
url = "https://github.com/madmaze/python-tesseract",
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
package_data = {'pytesseract': ['*.png','*.jpg']},
entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
|
Bump the package version - 0.1.7
|
Bump the package version - 0.1.7
Change log:
- Updated documentation for better PyPI support
- Includes all the latest fixes until now
|
Python
|
apache-2.0
|
madmaze/pytesseract
|
import os
from setuptools import setup
longDesc = ""
if os.path.exists("README.rst"):
longDesc = open("README.rst").read().strip()
setup(
name = "pytesseract",
version = "0.1.6",
author = "Samuel Hoffstaetter",
author_email="pytesseract@madmaze.net",
maintainer = "Matthias Lee",
maintainer_email = "pytesseract@madmaze.net",
description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
long_description = longDesc,
license = "GPLv3",
keywords = "python-tesseract OCR Python",
url = "https://github.com/madmaze/python-tesseract",
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
package_data = {'pytesseract': ['*.png','*.jpg']},
entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
Bump the package version - 0.1.7
Change log:
- Updated documentation for better PyPI support
- Includes all the latest fixes until now
|
import os
from setuptools import setup
longDesc = ""
if os.path.exists("README.rst"):
longDesc = open("README.rst").read().strip()
setup(
name = "pytesseract",
version = "0.1.7",
author = "Samuel Hoffstaetter",
author_email="pytesseract@madmaze.net",
maintainer = "Matthias Lee",
maintainer_email = "pytesseract@madmaze.net",
description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
long_description = longDesc,
license = "GPLv3",
keywords = "python-tesseract OCR Python",
url = "https://github.com/madmaze/python-tesseract",
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
package_data = {'pytesseract': ['*.png','*.jpg']},
entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
|
<commit_before>import os
from setuptools import setup
longDesc = ""
if os.path.exists("README.rst"):
longDesc = open("README.rst").read().strip()
setup(
name = "pytesseract",
version = "0.1.6",
author = "Samuel Hoffstaetter",
author_email="pytesseract@madmaze.net",
maintainer = "Matthias Lee",
maintainer_email = "pytesseract@madmaze.net",
description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
long_description = longDesc,
license = "GPLv3",
keywords = "python-tesseract OCR Python",
url = "https://github.com/madmaze/python-tesseract",
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
package_data = {'pytesseract': ['*.png','*.jpg']},
entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
<commit_msg>Bump the package version - 0.1.7
Change log:
- Updated documentation for better PyPI support
- Includes all the latest fixes until now<commit_after>
|
import os
from setuptools import setup
longDesc = ""
if os.path.exists("README.rst"):
longDesc = open("README.rst").read().strip()
setup(
name = "pytesseract",
version = "0.1.7",
author = "Samuel Hoffstaetter",
author_email="pytesseract@madmaze.net",
maintainer = "Matthias Lee",
maintainer_email = "pytesseract@madmaze.net",
description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
long_description = longDesc,
license = "GPLv3",
keywords = "python-tesseract OCR Python",
url = "https://github.com/madmaze/python-tesseract",
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
package_data = {'pytesseract': ['*.png','*.jpg']},
entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
|
import os
from setuptools import setup
longDesc = ""
if os.path.exists("README.rst"):
longDesc = open("README.rst").read().strip()
setup(
name = "pytesseract",
version = "0.1.6",
author = "Samuel Hoffstaetter",
author_email="pytesseract@madmaze.net",
maintainer = "Matthias Lee",
maintainer_email = "pytesseract@madmaze.net",
description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
long_description = longDesc,
license = "GPLv3",
keywords = "python-tesseract OCR Python",
url = "https://github.com/madmaze/python-tesseract",
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
package_data = {'pytesseract': ['*.png','*.jpg']},
entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
Bump the package version - 0.1.7
Change log:
- Updated documentation for better PyPI support
- Includes all the latest fixes until nowimport os
from setuptools import setup
longDesc = ""
if os.path.exists("README.rst"):
longDesc = open("README.rst").read().strip()
setup(
name = "pytesseract",
version = "0.1.7",
author = "Samuel Hoffstaetter",
author_email="pytesseract@madmaze.net",
maintainer = "Matthias Lee",
maintainer_email = "pytesseract@madmaze.net",
description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
long_description = longDesc,
license = "GPLv3",
keywords = "python-tesseract OCR Python",
url = "https://github.com/madmaze/python-tesseract",
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
package_data = {'pytesseract': ['*.png','*.jpg']},
entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
|
<commit_before>import os
from setuptools import setup
longDesc = ""
if os.path.exists("README.rst"):
longDesc = open("README.rst").read().strip()
setup(
name = "pytesseract",
version = "0.1.6",
author = "Samuel Hoffstaetter",
author_email="pytesseract@madmaze.net",
maintainer = "Matthias Lee",
maintainer_email = "pytesseract@madmaze.net",
description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
long_description = longDesc,
license = "GPLv3",
keywords = "python-tesseract OCR Python",
url = "https://github.com/madmaze/python-tesseract",
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
package_data = {'pytesseract': ['*.png','*.jpg']},
entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
<commit_msg>Bump the package version - 0.1.7
Change log:
- Updated documentation for better PyPI support
- Includes all the latest fixes until now<commit_after>import os
from setuptools import setup
longDesc = ""
if os.path.exists("README.rst"):
longDesc = open("README.rst").read().strip()
setup(
name = "pytesseract",
version = "0.1.7",
author = "Samuel Hoffstaetter",
author_email="pytesseract@madmaze.net",
maintainer = "Matthias Lee",
maintainer_email = "pytesseract@madmaze.net",
description = ("Python-tesseract is a python wrapper for google's Tesseract-OCR"),
long_description = longDesc,
license = "GPLv3",
keywords = "python-tesseract OCR Python",
url = "https://github.com/madmaze/python-tesseract",
packages=['pytesseract'],
package_dir={'pytesseract': 'src'},
package_data = {'pytesseract': ['*.png','*.jpg']},
entry_points = {'console_scripts': ['pytesseract = pytesseract.pytesseract:main']},
classifiers = [
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
]
)
|
bdf32d6bd87a1bc60cea3b5a5bfd0a665c4e3e82
|
setup.py
|
setup.py
|
"""
setup.py file for building armstrong components.
Nothing in this file should need to be edited, please see accompanying
package.json file if you need to adjust metadata about this package.
"""
import os
import json
from setuptools import setup, find_packages
info = json.load(open("./package.json"))
def generate_namespaces(package):
i = package.count(".")
while i:
yield package.rsplit(".", i)[0]
i -= 1
NAMESPACE_PACKAGES = list(generate_namespaces(info['name']))
if os.path.exists("MANIFEST"):
os.unlink("MANIFEST")
setup_kwargs = {
"author": "Bay Citizen & Texas Tribune",
"author_email": "dev@armstrongcms.org",
"url": "http://github.com/armstrong/%s/" % info["name"],
"packages": find_packages(exclude=["*.tests", "*.tests.*"]),
"namespace_packages": NAMESPACE_PACKAGES,
"include_package_data": True,
"classifiers": [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
}
setup_kwargs.update(info)
setup(**setup_kwargs)
|
# Nothing in this file should need to be edited.
# Use package.json to adjust metadata about this package.
# Use MANIFEST.in to include package-specific data files.
import os
import json
from setuptools import setup, find_packages
info = json.load(open("./package.json"))
def generate_namespaces(package):
i = package.count(".")
while i:
yield package.rsplit(".", i)[0]
i -= 1
NAMESPACE_PACKAGES = list(generate_namespaces(info['name']))
if os.path.exists("MANIFEST"):
os.unlink("MANIFEST")
setup_kwargs = {
"author": "Bay Citizen & Texas Tribune",
"author_email": "dev@armstrongcms.org",
"url": "http://github.com/armstrong/%s/" % info["name"],
"packages": find_packages(),
"namespace_packages": NAMESPACE_PACKAGES,
"include_package_data": True,
"classifiers": [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
}
setup_kwargs.update(info)
setup(**setup_kwargs)
|
Use `find_packages()` and since we aren't building `package_data` anymore, we need to use `MANIFEST.in`. That's what it's there for and does a more obvious job. "Explicit is better than implicit." Using MANIFEST requires `include_package_data=True`.
|
Use `find_packages()` and since we aren't building `package_data` anymore, we need to use `MANIFEST.in`. That's what it's there for and does a more obvious job. "Explicit is better than implicit." Using MANIFEST requires `include_package_data=True`.
|
Python
|
apache-2.0
|
armstrong/armstrong.dev
|
"""
setup.py file for building armstrong components.
Nothing in this file should need to be edited, please see accompanying
package.json file if you need to adjust metadata about this package.
"""
import os
import json
from setuptools import setup, find_packages
info = json.load(open("./package.json"))
def generate_namespaces(package):
i = package.count(".")
while i:
yield package.rsplit(".", i)[0]
i -= 1
NAMESPACE_PACKAGES = list(generate_namespaces(info['name']))
if os.path.exists("MANIFEST"):
os.unlink("MANIFEST")
setup_kwargs = {
"author": "Bay Citizen & Texas Tribune",
"author_email": "dev@armstrongcms.org",
"url": "http://github.com/armstrong/%s/" % info["name"],
"packages": find_packages(exclude=["*.tests", "*.tests.*"]),
"namespace_packages": NAMESPACE_PACKAGES,
"include_package_data": True,
"classifiers": [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
}
setup_kwargs.update(info)
setup(**setup_kwargs)
Use `find_packages()` and since we aren't building `package_data` anymore, we need to use `MANIFEST.in`. That's what it's there for and does a more obvious job. "Explicit is better than implicit." Using MANIFEST requires `include_package_data=True`.
|
# Nothing in this file should need to be edited.
# Use package.json to adjust metadata about this package.
# Use MANIFEST.in to include package-specific data files.
import os
import json
from setuptools import setup, find_packages
info = json.load(open("./package.json"))
def generate_namespaces(package):
i = package.count(".")
while i:
yield package.rsplit(".", i)[0]
i -= 1
NAMESPACE_PACKAGES = list(generate_namespaces(info['name']))
if os.path.exists("MANIFEST"):
os.unlink("MANIFEST")
setup_kwargs = {
"author": "Bay Citizen & Texas Tribune",
"author_email": "dev@armstrongcms.org",
"url": "http://github.com/armstrong/%s/" % info["name"],
"packages": find_packages(),
"namespace_packages": NAMESPACE_PACKAGES,
"include_package_data": True,
"classifiers": [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
}
setup_kwargs.update(info)
setup(**setup_kwargs)
|
<commit_before>"""
setup.py file for building armstrong components.
Nothing in this file should need to be edited, please see accompanying
package.json file if you need to adjust metadata about this package.
"""
import os
import json
from setuptools import setup, find_packages
info = json.load(open("./package.json"))
def generate_namespaces(package):
i = package.count(".")
while i:
yield package.rsplit(".", i)[0]
i -= 1
NAMESPACE_PACKAGES = list(generate_namespaces(info['name']))
if os.path.exists("MANIFEST"):
os.unlink("MANIFEST")
setup_kwargs = {
"author": "Bay Citizen & Texas Tribune",
"author_email": "dev@armstrongcms.org",
"url": "http://github.com/armstrong/%s/" % info["name"],
"packages": find_packages(exclude=["*.tests", "*.tests.*"]),
"namespace_packages": NAMESPACE_PACKAGES,
"include_package_data": True,
"classifiers": [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
}
setup_kwargs.update(info)
setup(**setup_kwargs)
<commit_msg>Use `find_packages()` and since we aren't building `package_data` anymore, we need to use `MANIFEST.in`. That's what it's there for and does a more obvious job. "Explicit is better than implicit." Using MANIFEST requires `include_package_data=True`.<commit_after>
|
# Nothing in this file should need to be edited.
# Use package.json to adjust metadata about this package.
# Use MANIFEST.in to include package-specific data files.
import os
import json
from setuptools import setup, find_packages
info = json.load(open("./package.json"))
def generate_namespaces(package):
i = package.count(".")
while i:
yield package.rsplit(".", i)[0]
i -= 1
NAMESPACE_PACKAGES = list(generate_namespaces(info['name']))
if os.path.exists("MANIFEST"):
os.unlink("MANIFEST")
setup_kwargs = {
"author": "Bay Citizen & Texas Tribune",
"author_email": "dev@armstrongcms.org",
"url": "http://github.com/armstrong/%s/" % info["name"],
"packages": find_packages(),
"namespace_packages": NAMESPACE_PACKAGES,
"include_package_data": True,
"classifiers": [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
}
setup_kwargs.update(info)
setup(**setup_kwargs)
|
"""
setup.py file for building armstrong components.
Nothing in this file should need to be edited, please see accompanying
package.json file if you need to adjust metadata about this package.
"""
import os
import json
from setuptools import setup, find_packages
info = json.load(open("./package.json"))
def generate_namespaces(package):
i = package.count(".")
while i:
yield package.rsplit(".", i)[0]
i -= 1
NAMESPACE_PACKAGES = list(generate_namespaces(info['name']))
if os.path.exists("MANIFEST"):
os.unlink("MANIFEST")
setup_kwargs = {
"author": "Bay Citizen & Texas Tribune",
"author_email": "dev@armstrongcms.org",
"url": "http://github.com/armstrong/%s/" % info["name"],
"packages": find_packages(exclude=["*.tests", "*.tests.*"]),
"namespace_packages": NAMESPACE_PACKAGES,
"include_package_data": True,
"classifiers": [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
}
setup_kwargs.update(info)
setup(**setup_kwargs)
Use `find_packages()` and since we aren't building `package_data` anymore, we need to use `MANIFEST.in`. That's what it's there for and does a more obvious job. "Explicit is better than implicit." Using MANIFEST requires `include_package_data=True`.# Nothing in this file should need to be edited.
# Use package.json to adjust metadata about this package.
# Use MANIFEST.in to include package-specific data files.
import os
import json
from setuptools import setup, find_packages
info = json.load(open("./package.json"))
def generate_namespaces(package):
i = package.count(".")
while i:
yield package.rsplit(".", i)[0]
i -= 1
NAMESPACE_PACKAGES = list(generate_namespaces(info['name']))
if os.path.exists("MANIFEST"):
os.unlink("MANIFEST")
setup_kwargs = {
"author": "Bay Citizen & Texas Tribune",
"author_email": "dev@armstrongcms.org",
"url": "http://github.com/armstrong/%s/" % info["name"],
"packages": find_packages(),
"namespace_packages": NAMESPACE_PACKAGES,
"include_package_data": True,
"classifiers": [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
}
setup_kwargs.update(info)
setup(**setup_kwargs)
|
<commit_before>"""
setup.py file for building armstrong components.
Nothing in this file should need to be edited, please see accompanying
package.json file if you need to adjust metadata about this package.
"""
import os
import json
from setuptools import setup, find_packages
info = json.load(open("./package.json"))
def generate_namespaces(package):
i = package.count(".")
while i:
yield package.rsplit(".", i)[0]
i -= 1
NAMESPACE_PACKAGES = list(generate_namespaces(info['name']))
if os.path.exists("MANIFEST"):
os.unlink("MANIFEST")
setup_kwargs = {
"author": "Bay Citizen & Texas Tribune",
"author_email": "dev@armstrongcms.org",
"url": "http://github.com/armstrong/%s/" % info["name"],
"packages": find_packages(exclude=["*.tests", "*.tests.*"]),
"namespace_packages": NAMESPACE_PACKAGES,
"include_package_data": True,
"classifiers": [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
}
setup_kwargs.update(info)
setup(**setup_kwargs)
<commit_msg>Use `find_packages()` and since we aren't building `package_data` anymore, we need to use `MANIFEST.in`. That's what it's there for and does a more obvious job. "Explicit is better than implicit." Using MANIFEST requires `include_package_data=True`.<commit_after># Nothing in this file should need to be edited.
# Use package.json to adjust metadata about this package.
# Use MANIFEST.in to include package-specific data files.
import os
import json
from setuptools import setup, find_packages
info = json.load(open("./package.json"))
def generate_namespaces(package):
i = package.count(".")
while i:
yield package.rsplit(".", i)[0]
i -= 1
NAMESPACE_PACKAGES = list(generate_namespaces(info['name']))
if os.path.exists("MANIFEST"):
os.unlink("MANIFEST")
setup_kwargs = {
"author": "Bay Citizen & Texas Tribune",
"author_email": "dev@armstrongcms.org",
"url": "http://github.com/armstrong/%s/" % info["name"],
"packages": find_packages(),
"namespace_packages": NAMESPACE_PACKAGES,
"include_package_data": True,
"classifiers": [
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
}
setup_kwargs.update(info)
setup(**setup_kwargs)
|
c0a479ad3bbfd0f2f77c628ee10fd01675a942b9
|
main.py
|
main.py
|
import optparse
from bounty import *
from peers import *
from settings import *
def main():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate-factor',
default=None,
help='Minimum funds:reward ratio you'll propagate bounties at')
(options, args) = parser.parse_args()
overrides = {}
for key in options:
if settings.defaults.get(key) is not None:
overrides = overrides.update({key:settings.default.get(key)})
settings.setup(overrides)
if __name__ == "__main__":
main()
|
from bounty import *
from peers import *
import settings
def main():
settings.setup()
print "settings are:"
print settings.config
if __name__ == "__main__":
main()
|
Move configs to settings module
|
Move configs to settings module
|
Python
|
mit
|
gappleto97/Senior-Project
|
import optparse
from bounty import *
from peers import *
from settings import *
def main():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate-factor',
default=None,
help='Minimum funds:reward ratio you'll propagate bounties at')
(options, args) = parser.parse_args()
overrides = {}
for key in options:
if settings.defaults.get(key) is not None:
overrides = overrides.update({key:settings.default.get(key)})
settings.setup(overrides)
if __name__ == "__main__":
main()
Move configs to settings module
|
from bounty import *
from peers import *
import settings
def main():
settings.setup()
print "settings are:"
print settings.config
if __name__ == "__main__":
main()
|
<commit_before>import optparse
from bounty import *
from peers import *
from settings import *
def main():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate-factor',
default=None,
help='Minimum funds:reward ratio you'll propagate bounties at')
(options, args) = parser.parse_args()
overrides = {}
for key in options:
if settings.defaults.get(key) is not None:
overrides = overrides.update({key:settings.default.get(key)})
settings.setup(overrides)
if __name__ == "__main__":
main()
<commit_msg>Move configs to settings module<commit_after>
|
from bounty import *
from peers import *
import settings
def main():
settings.setup()
print "settings are:"
print settings.config
if __name__ == "__main__":
main()
|
import optparse
from bounty import *
from peers import *
from settings import *
def main():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate-factor',
default=None,
help='Minimum funds:reward ratio you'll propagate bounties at')
(options, args) = parser.parse_args()
overrides = {}
for key in options:
if settings.defaults.get(key) is not None:
overrides = overrides.update({key:settings.default.get(key)})
settings.setup(overrides)
if __name__ == "__main__":
main()
Move configs to settings modulefrom bounty import *
from peers import *
import settings
def main():
settings.setup()
print "settings are:"
print settings.config
if __name__ == "__main__":
main()
|
<commit_before>import optparse
from bounty import *
from peers import *
from settings import *
def main():
parser = optparse.OptionParser()
parser.add_option('-c',
'--charity',
dest='charity',
default=None,
action="store_true",
help='Sets whether you accept rewardless bounties')
parser.add_option('-l',
'--latency',
dest='accept_latency',
default=None,
help='Maximum acceptable latency from a server')
parser.add_option('-f',
'--propagation-factor',
dest='propagate-factor',
default=None,
help='Minimum funds:reward ratio you'll propagate bounties at')
(options, args) = parser.parse_args()
overrides = {}
for key in options:
if settings.defaults.get(key) is not None:
overrides = overrides.update({key:settings.default.get(key)})
settings.setup(overrides)
if __name__ == "__main__":
main()
<commit_msg>Move configs to settings module<commit_after>from bounty import *
from peers import *
import settings
def main():
settings.setup()
print "settings are:"
print settings.config
if __name__ == "__main__":
main()
|
2b3c566b7558ffa2fe9377705f07525470232bb6
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages
setup(
name = "cobe",
version = "2.0.2",
author = "Peter Teichman",
author_email = "peter@teichman.org",
url = "http://wiki.github.com/pteichman/cobe/",
description = "Markov chain based text generator library and chatbot",
packages = ["cobe"],
test_suite = "tests",
setup_requires = [
"nose==1.1.2",
"coverage==3.5"
],
install_requires = [
"PyStemmer==1.2.0",
"argparse==1.2.1",
"python-irclib==0.4.6"
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts" : [
"cobe = cobe.control:main"
]
}
)
|
#!/usr/bin/env python
# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages
setup(
name = "cobe",
version = "2.0.2",
author = "Peter Teichman",
author_email = "peter@teichman.org",
url = "http://wiki.github.com/pteichman/cobe/",
description = "Markov chain based text generator library and chatbot",
packages = ["cobe"],
test_suite = "tests",
setup_requires = [
"nose==1.1.2",
"coverage==3.5"
],
install_requires = [
"PyStemmer==1.2.0",
"argparse==1.2.1",
"python-irclib==0.4.8"
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts" : [
"cobe = cobe.control:main"
]
}
)
|
Update the irclib dependency to version 0.4.8.
|
Update the irclib dependency to version 0.4.8.
Add an explicit url in dependency_links for irclib's SourceForge
download page. The url currently in PyPI is stale, so the download
fails.
It looks like the upstream package is now called "irc", so
investigate porting cobe to that in the future.
Fixes #3: "Depends on outdated irclib?"
|
Python
|
mit
|
DarkMio/cobe,meska/cobe,wodim/cobe-ng,pteichman/cobe,LeMagnesium/cobe,wodim/cobe-ng,pteichman/cobe,LeMagnesium/cobe,DarkMio/cobe,tiagochiavericosta/cobe,meska/cobe,tiagochiavericosta/cobe
|
#!/usr/bin/env python
# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages
setup(
name = "cobe",
version = "2.0.2",
author = "Peter Teichman",
author_email = "peter@teichman.org",
url = "http://wiki.github.com/pteichman/cobe/",
description = "Markov chain based text generator library and chatbot",
packages = ["cobe"],
test_suite = "tests",
setup_requires = [
"nose==1.1.2",
"coverage==3.5"
],
install_requires = [
"PyStemmer==1.2.0",
"argparse==1.2.1",
"python-irclib==0.4.6"
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts" : [
"cobe = cobe.control:main"
]
}
)
Update the irclib dependency to version 0.4.8.
Add an explicit url in dependency_links for irclib's SourceForge
download page. The url currently in PyPI is stale, so the download
fails.
It looks like the upstream package is now called "irc", so
investigate porting cobe to that in the future.
Fixes #3: "Depends on outdated irclib?"
|
#!/usr/bin/env python
# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages
setup(
name = "cobe",
version = "2.0.2",
author = "Peter Teichman",
author_email = "peter@teichman.org",
url = "http://wiki.github.com/pteichman/cobe/",
description = "Markov chain based text generator library and chatbot",
packages = ["cobe"],
test_suite = "tests",
setup_requires = [
"nose==1.1.2",
"coverage==3.5"
],
install_requires = [
"PyStemmer==1.2.0",
"argparse==1.2.1",
"python-irclib==0.4.8"
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts" : [
"cobe = cobe.control:main"
]
}
)
|
<commit_before>#!/usr/bin/env python
# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages
setup(
name = "cobe",
version = "2.0.2",
author = "Peter Teichman",
author_email = "peter@teichman.org",
url = "http://wiki.github.com/pteichman/cobe/",
description = "Markov chain based text generator library and chatbot",
packages = ["cobe"],
test_suite = "tests",
setup_requires = [
"nose==1.1.2",
"coverage==3.5"
],
install_requires = [
"PyStemmer==1.2.0",
"argparse==1.2.1",
"python-irclib==0.4.6"
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts" : [
"cobe = cobe.control:main"
]
}
)
<commit_msg>Update the irclib dependency to version 0.4.8.
Add an explicit url in dependency_links for irclib's SourceForge
download page. The url currently in PyPI is stale, so the download
fails.
It looks like the upstream package is now called "irc", so
investigate porting cobe to that in the future.
Fixes #3: "Depends on outdated irclib?"<commit_after>
|
#!/usr/bin/env python
# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages
setup(
name = "cobe",
version = "2.0.2",
author = "Peter Teichman",
author_email = "peter@teichman.org",
url = "http://wiki.github.com/pteichman/cobe/",
description = "Markov chain based text generator library and chatbot",
packages = ["cobe"],
test_suite = "tests",
setup_requires = [
"nose==1.1.2",
"coverage==3.5"
],
install_requires = [
"PyStemmer==1.2.0",
"argparse==1.2.1",
"python-irclib==0.4.8"
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts" : [
"cobe = cobe.control:main"
]
}
)
|
#!/usr/bin/env python
# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages
setup(
name = "cobe",
version = "2.0.2",
author = "Peter Teichman",
author_email = "peter@teichman.org",
url = "http://wiki.github.com/pteichman/cobe/",
description = "Markov chain based text generator library and chatbot",
packages = ["cobe"],
test_suite = "tests",
setup_requires = [
"nose==1.1.2",
"coverage==3.5"
],
install_requires = [
"PyStemmer==1.2.0",
"argparse==1.2.1",
"python-irclib==0.4.6"
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts" : [
"cobe = cobe.control:main"
]
}
)
Update the irclib dependency to version 0.4.8.
Add an explicit url in dependency_links for irclib's SourceForge
download page. The url currently in PyPI is stale, so the download
fails.
It looks like the upstream package is now called "irc", so
investigate porting cobe to that in the future.
Fixes #3: "Depends on outdated irclib?"#!/usr/bin/env python
# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages
setup(
name = "cobe",
version = "2.0.2",
author = "Peter Teichman",
author_email = "peter@teichman.org",
url = "http://wiki.github.com/pteichman/cobe/",
description = "Markov chain based text generator library and chatbot",
packages = ["cobe"],
test_suite = "tests",
setup_requires = [
"nose==1.1.2",
"coverage==3.5"
],
install_requires = [
"PyStemmer==1.2.0",
"argparse==1.2.1",
"python-irclib==0.4.8"
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts" : [
"cobe = cobe.control:main"
]
}
)
|
<commit_before>#!/usr/bin/env python
# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages
setup(
name = "cobe",
version = "2.0.2",
author = "Peter Teichman",
author_email = "peter@teichman.org",
url = "http://wiki.github.com/pteichman/cobe/",
description = "Markov chain based text generator library and chatbot",
packages = ["cobe"],
test_suite = "tests",
setup_requires = [
"nose==1.1.2",
"coverage==3.5"
],
install_requires = [
"PyStemmer==1.2.0",
"argparse==1.2.1",
"python-irclib==0.4.6"
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts" : [
"cobe = cobe.control:main"
]
}
)
<commit_msg>Update the irclib dependency to version 0.4.8.
Add an explicit url in dependency_links for irclib's SourceForge
download page. The url currently in PyPI is stale, so the download
fails.
It looks like the upstream package is now called "irc", so
investigate porting cobe to that in the future.
Fixes #3: "Depends on outdated irclib?"<commit_after>#!/usr/bin/env python
# Require setuptools. See http://pypi.python.org/pypi/setuptools for
# installation instructions, or run the ez_setup script found at
# http://peak.telecommunity.com/dist/ez_setup.py
from setuptools import setup, find_packages
setup(
name = "cobe",
version = "2.0.2",
author = "Peter Teichman",
author_email = "peter@teichman.org",
url = "http://wiki.github.com/pteichman/cobe/",
description = "Markov chain based text generator library and chatbot",
packages = ["cobe"],
test_suite = "tests",
setup_requires = [
"nose==1.1.2",
"coverage==3.5"
],
install_requires = [
"PyStemmer==1.2.0",
"argparse==1.2.1",
"python-irclib==0.4.8"
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Scientific/Engineering :: Artificial Intelligence"
],
entry_points = {
"console_scripts" : [
"cobe = cobe.control:main"
]
}
)
|
9a23af9ba053e669e9c3750e72251228f32b4a86
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.7',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import ptwit
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version=ptwit.__version__,
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author=ptwit.__author__,
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license=ptwit.__license__,
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
|
Use imported version and author from ptwit
|
Use imported version and author from ptwit
|
Python
|
mit
|
ptpt/ptwit
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.7',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
Use imported version and author from ptwit
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import ptwit
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version=ptwit.__version__,
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author=ptwit.__author__,
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license=ptwit.__license__,
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.7',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
<commit_msg>Use imported version and author from ptwit<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import ptwit
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version=ptwit.__version__,
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author=ptwit.__author__,
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license=ptwit.__license__,
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.7',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
Use imported version and author from ptwit#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import ptwit
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version=ptwit.__version__,
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author=ptwit.__author__,
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license=ptwit.__license__,
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.7',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
<commit_msg>Use imported version and author from ptwit<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import ptwit
requires = ['python-twitter>=1.0']
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version=ptwit.__version__,
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='http://github.com/ptpt/ptwit',
author=ptwit.__author__,
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license=ptwit.__license__,
py_modules=['ptwit'],
install_requires=requires,
entry_points={
'console_scripts': ['ptwit=ptwit:cmd']},
zip_safe=False)
|
79967811ffdd739bd7a653f4644eec5c4b014625
|
setup.py
|
setup.py
|
"""Setuptools configuration for interfaces."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='asyncdef.interfaces',
version='0.1.0',
url='https://github.com/asyncdef/interfaces',
description='Public APIs for the core asyncdef components.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='Apache 2.0',
packages=find_packages(exclude=['build', 'dist', 'docs']),
install_requires=[
'iface',
],
extras_require={
'testing': [
'pep257',
'pep8',
'pyenchant',
'pyflakes',
'pylint',
],
},
entry_points={
'console_scripts': [
],
},
include_package_data=True,
zip_safe=False,
)
|
"""Setuptools configuration for interfaces."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='asyncdef.interfaces',
version='0.1.3',
url='https://github.com/asyncdef/interfaces',
description='Public APIs for the core asyncdef components.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='Apache 2.0',
packages=[
'asyncdef',
'asyncdef.interfaces',
'asyncdef.interfaces.engine',
],
install_requires=[
'iface',
],
extras_require={
'testing': [
'pep257',
'pep8',
'pyenchant',
'pyflakes',
'pylint',
],
},
entry_points={
'console_scripts': [
],
},
include_package_data=True,
zip_safe=False,
)
|
Fix packaging to resolve the PEP420 namespace
|
Fix packaging to resolve the PEP420 namespace
Setuptools is still lacking support for PEP480 namespace packages
when using the find_packages function. Until it does all packages,
including the namespace, must be registered in the packages list.
|
Python
|
apache-2.0
|
asyncdef/interfaces
|
"""Setuptools configuration for interfaces."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='asyncdef.interfaces',
version='0.1.0',
url='https://github.com/asyncdef/interfaces',
description='Public APIs for the core asyncdef components.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='Apache 2.0',
packages=find_packages(exclude=['build', 'dist', 'docs']),
install_requires=[
'iface',
],
extras_require={
'testing': [
'pep257',
'pep8',
'pyenchant',
'pyflakes',
'pylint',
],
},
entry_points={
'console_scripts': [
],
},
include_package_data=True,
zip_safe=False,
)
Fix packaging to resolve the PEP420 namespace
Setuptools is still lacking support for PEP480 namespace packages
when using the find_packages function. Until it does all packages,
including the namespace, must be registered in the packages list.
|
"""Setuptools configuration for interfaces."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='asyncdef.interfaces',
version='0.1.3',
url='https://github.com/asyncdef/interfaces',
description='Public APIs for the core asyncdef components.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='Apache 2.0',
packages=[
'asyncdef',
'asyncdef.interfaces',
'asyncdef.interfaces.engine',
],
install_requires=[
'iface',
],
extras_require={
'testing': [
'pep257',
'pep8',
'pyenchant',
'pyflakes',
'pylint',
],
},
entry_points={
'console_scripts': [
],
},
include_package_data=True,
zip_safe=False,
)
|
<commit_before>"""Setuptools configuration for interfaces."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='asyncdef.interfaces',
version='0.1.0',
url='https://github.com/asyncdef/interfaces',
description='Public APIs for the core asyncdef components.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='Apache 2.0',
packages=find_packages(exclude=['build', 'dist', 'docs']),
install_requires=[
'iface',
],
extras_require={
'testing': [
'pep257',
'pep8',
'pyenchant',
'pyflakes',
'pylint',
],
},
entry_points={
'console_scripts': [
],
},
include_package_data=True,
zip_safe=False,
)
<commit_msg>Fix packaging to resolve the PEP420 namespace
Setuptools is still lacking support for PEP480 namespace packages
when using the find_packages function. Until it does all packages,
including the namespace, must be registered in the packages list.<commit_after>
|
"""Setuptools configuration for interfaces."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='asyncdef.interfaces',
version='0.1.3',
url='https://github.com/asyncdef/interfaces',
description='Public APIs for the core asyncdef components.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='Apache 2.0',
packages=[
'asyncdef',
'asyncdef.interfaces',
'asyncdef.interfaces.engine',
],
install_requires=[
'iface',
],
extras_require={
'testing': [
'pep257',
'pep8',
'pyenchant',
'pyflakes',
'pylint',
],
},
entry_points={
'console_scripts': [
],
},
include_package_data=True,
zip_safe=False,
)
|
"""Setuptools configuration for interfaces."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='asyncdef.interfaces',
version='0.1.0',
url='https://github.com/asyncdef/interfaces',
description='Public APIs for the core asyncdef components.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='Apache 2.0',
packages=find_packages(exclude=['build', 'dist', 'docs']),
install_requires=[
'iface',
],
extras_require={
'testing': [
'pep257',
'pep8',
'pyenchant',
'pyflakes',
'pylint',
],
},
entry_points={
'console_scripts': [
],
},
include_package_data=True,
zip_safe=False,
)
Fix packaging to resolve the PEP420 namespace
Setuptools is still lacking support for PEP480 namespace packages
when using the find_packages function. Until it does all packages,
including the namespace, must be registered in the packages list."""Setuptools configuration for interfaces."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='asyncdef.interfaces',
version='0.1.3',
url='https://github.com/asyncdef/interfaces',
description='Public APIs for the core asyncdef components.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='Apache 2.0',
packages=[
'asyncdef',
'asyncdef.interfaces',
'asyncdef.interfaces.engine',
],
install_requires=[
'iface',
],
extras_require={
'testing': [
'pep257',
'pep8',
'pyenchant',
'pyflakes',
'pylint',
],
},
entry_points={
'console_scripts': [
],
},
include_package_data=True,
zip_safe=False,
)
|
<commit_before>"""Setuptools configuration for interfaces."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='asyncdef.interfaces',
version='0.1.0',
url='https://github.com/asyncdef/interfaces',
description='Public APIs for the core asyncdef components.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='Apache 2.0',
packages=find_packages(exclude=['build', 'dist', 'docs']),
install_requires=[
'iface',
],
extras_require={
'testing': [
'pep257',
'pep8',
'pyenchant',
'pyflakes',
'pylint',
],
},
entry_points={
'console_scripts': [
],
},
include_package_data=True,
zip_safe=False,
)
<commit_msg>Fix packaging to resolve the PEP420 namespace
Setuptools is still lacking support for PEP480 namespace packages
when using the find_packages function. Until it does all packages,
including the namespace, must be registered in the packages list.<commit_after>"""Setuptools configuration for interfaces."""
from setuptools import setup
from setuptools import find_packages
with open('README.rst', 'r') as readmefile:
README = readmefile.read()
setup(
name='asyncdef.interfaces',
version='0.1.3',
url='https://github.com/asyncdef/interfaces',
description='Public APIs for the core asyncdef components.',
author="Kevin Conway",
author_email="kevinjacobconway@gmail.com",
long_description=README,
license='Apache 2.0',
packages=[
'asyncdef',
'asyncdef.interfaces',
'asyncdef.interfaces.engine',
],
install_requires=[
'iface',
],
extras_require={
'testing': [
'pep257',
'pep8',
'pyenchant',
'pyflakes',
'pylint',
],
},
entry_points={
'console_scripts': [
],
},
include_package_data=True,
zip_safe=False,
)
|
5e61fcd8245467b52a97a289565f7eb5496e10c9
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='DeviceHub',
version='0.1',
packages=find_packages(),
url='https://github.com/eReuse/DeviceHub',
license='AGPLv3 License',
author='eReuse team',
author_email='x.bustamante@ereuse.org',
description='The DeviceHub is a Device Management System (DMS) created under the project eReuse. Its purpose is to '
'offer a way for donors and receivers to efficiently manage the reuse process ensuring final recycling.',
install_requires=[
'inflection',
'eve', # Which has a bug, for now... todo try 0.6.2 when stable
'passlib'
],
include_package_data=True,
long_description="""
Credits:
Icons made by <a href="http://www.freepik.com" title="Freepik">Freepik</a> from <a href="http://www.flaticon.com" title="Flaticon">www.flaticon.com</a> is licensed by <a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0">CC BY 3.0</a>
"""
)
|
from setuptools import setup, find_packages
setup(
name='DeviceHub',
version='0.1',
packages=find_packages(),
url='https://github.com/eReuse/DeviceHub',
license='AGPLv3 License',
author='eReuse team',
author_email='x.bustamante@ereuse.org',
description='The DeviceHub is a Device Management System (DMS) created under the project eReuse. Its purpose is to '
'offer a way for donors and receivers to efficiently manage the reuse process ensuring final recycling.',
install_requires=[
'inflection',
'eve', # Which has a bug, for now... todo try 0.6.2 when stable
'passlib',
'validators'
],
include_package_data=True,
long_description="""
Credits:
Icons made by <a href="http://www.freepik.com" title="Freepik">Freepik</a> from <a href="http://www.flaticon.com" title="Flaticon">www.flaticon.com</a> is licensed by <a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0">CC BY 3.0</a>
"""
)
|
Add passlib as package requirement
|
Add passlib as package requirement
|
Python
|
agpl-3.0
|
eReuse/DeviceHub,eReuse/DeviceHub
|
from setuptools import setup, find_packages
setup(
name='DeviceHub',
version='0.1',
packages=find_packages(),
url='https://github.com/eReuse/DeviceHub',
license='AGPLv3 License',
author='eReuse team',
author_email='x.bustamante@ereuse.org',
description='The DeviceHub is a Device Management System (DMS) created under the project eReuse. Its purpose is to '
'offer a way for donors and receivers to efficiently manage the reuse process ensuring final recycling.',
install_requires=[
'inflection',
'eve', # Which has a bug, for now... todo try 0.6.2 when stable
'passlib'
],
include_package_data=True,
long_description="""
Credits:
Icons made by <a href="http://www.freepik.com" title="Freepik">Freepik</a> from <a href="http://www.flaticon.com" title="Flaticon">www.flaticon.com</a> is licensed by <a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0">CC BY 3.0</a>
"""
)
Add passlib as package requirement
|
from setuptools import setup, find_packages
setup(
name='DeviceHub',
version='0.1',
packages=find_packages(),
url='https://github.com/eReuse/DeviceHub',
license='AGPLv3 License',
author='eReuse team',
author_email='x.bustamante@ereuse.org',
description='The DeviceHub is a Device Management System (DMS) created under the project eReuse. Its purpose is to '
'offer a way for donors and receivers to efficiently manage the reuse process ensuring final recycling.',
install_requires=[
'inflection',
'eve', # Which has a bug, for now... todo try 0.6.2 when stable
'passlib',
'validators'
],
include_package_data=True,
long_description="""
Credits:
Icons made by <a href="http://www.freepik.com" title="Freepik">Freepik</a> from <a href="http://www.flaticon.com" title="Flaticon">www.flaticon.com</a> is licensed by <a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0">CC BY 3.0</a>
"""
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='DeviceHub',
version='0.1',
packages=find_packages(),
url='https://github.com/eReuse/DeviceHub',
license='AGPLv3 License',
author='eReuse team',
author_email='x.bustamante@ereuse.org',
description='The DeviceHub is a Device Management System (DMS) created under the project eReuse. Its purpose is to '
'offer a way for donors and receivers to efficiently manage the reuse process ensuring final recycling.',
install_requires=[
'inflection',
'eve', # Which has a bug, for now... todo try 0.6.2 when stable
'passlib'
],
include_package_data=True,
long_description="""
Credits:
Icons made by <a href="http://www.freepik.com" title="Freepik">Freepik</a> from <a href="http://www.flaticon.com" title="Flaticon">www.flaticon.com</a> is licensed by <a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0">CC BY 3.0</a>
"""
)
<commit_msg>Add passlib as package requirement<commit_after>
|
from setuptools import setup, find_packages
setup(
name='DeviceHub',
version='0.1',
packages=find_packages(),
url='https://github.com/eReuse/DeviceHub',
license='AGPLv3 License',
author='eReuse team',
author_email='x.bustamante@ereuse.org',
description='The DeviceHub is a Device Management System (DMS) created under the project eReuse. Its purpose is to '
'offer a way for donors and receivers to efficiently manage the reuse process ensuring final recycling.',
install_requires=[
'inflection',
'eve', # Which has a bug, for now... todo try 0.6.2 when stable
'passlib',
'validators'
],
include_package_data=True,
long_description="""
Credits:
Icons made by <a href="http://www.freepik.com" title="Freepik">Freepik</a> from <a href="http://www.flaticon.com" title="Flaticon">www.flaticon.com</a> is licensed by <a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0">CC BY 3.0</a>
"""
)
|
from setuptools import setup, find_packages
setup(
name='DeviceHub',
version='0.1',
packages=find_packages(),
url='https://github.com/eReuse/DeviceHub',
license='AGPLv3 License',
author='eReuse team',
author_email='x.bustamante@ereuse.org',
description='The DeviceHub is a Device Management System (DMS) created under the project eReuse. Its purpose is to '
'offer a way for donors and receivers to efficiently manage the reuse process ensuring final recycling.',
install_requires=[
'inflection',
'eve', # Which has a bug, for now... todo try 0.6.2 when stable
'passlib'
],
include_package_data=True,
long_description="""
Credits:
Icons made by <a href="http://www.freepik.com" title="Freepik">Freepik</a> from <a href="http://www.flaticon.com" title="Flaticon">www.flaticon.com</a> is licensed by <a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0">CC BY 3.0</a>
"""
)
Add passlib as package requirementfrom setuptools import setup, find_packages
setup(
name='DeviceHub',
version='0.1',
packages=find_packages(),
url='https://github.com/eReuse/DeviceHub',
license='AGPLv3 License',
author='eReuse team',
author_email='x.bustamante@ereuse.org',
description='The DeviceHub is a Device Management System (DMS) created under the project eReuse. Its purpose is to '
'offer a way for donors and receivers to efficiently manage the reuse process ensuring final recycling.',
install_requires=[
'inflection',
'eve', # Which has a bug, for now... todo try 0.6.2 when stable
'passlib',
'validators'
],
include_package_data=True,
long_description="""
Credits:
Icons made by <a href="http://www.freepik.com" title="Freepik">Freepik</a> from <a href="http://www.flaticon.com" title="Flaticon">www.flaticon.com</a> is licensed by <a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0">CC BY 3.0</a>
"""
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='DeviceHub',
version='0.1',
packages=find_packages(),
url='https://github.com/eReuse/DeviceHub',
license='AGPLv3 License',
author='eReuse team',
author_email='x.bustamante@ereuse.org',
description='The DeviceHub is a Device Management System (DMS) created under the project eReuse. Its purpose is to '
'offer a way for donors and receivers to efficiently manage the reuse process ensuring final recycling.',
install_requires=[
'inflection',
'eve', # Which has a bug, for now... todo try 0.6.2 when stable
'passlib'
],
include_package_data=True,
long_description="""
Credits:
Icons made by <a href="http://www.freepik.com" title="Freepik">Freepik</a> from <a href="http://www.flaticon.com" title="Flaticon">www.flaticon.com</a> is licensed by <a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0">CC BY 3.0</a>
"""
)
<commit_msg>Add passlib as package requirement<commit_after>from setuptools import setup, find_packages
setup(
name='DeviceHub',
version='0.1',
packages=find_packages(),
url='https://github.com/eReuse/DeviceHub',
license='AGPLv3 License',
author='eReuse team',
author_email='x.bustamante@ereuse.org',
description='The DeviceHub is a Device Management System (DMS) created under the project eReuse. Its purpose is to '
'offer a way for donors and receivers to efficiently manage the reuse process ensuring final recycling.',
install_requires=[
'inflection',
'eve', # Which has a bug, for now... todo try 0.6.2 when stable
'passlib',
'validators'
],
include_package_data=True,
long_description="""
Credits:
Icons made by <a href="http://www.freepik.com" title="Freepik">Freepik</a> from <a href="http://www.flaticon.com" title="Flaticon">www.flaticon.com</a> is licensed by <a href="http://creativecommons.org/licenses/by/3.0/" title="Creative Commons BY 3.0">CC BY 3.0</a>
"""
)
|
991ed46bcc0886e310c43b33ada4aad0d95991a0
|
mooc.py
|
mooc.py
|
import csv
import collections
import argparse
parser = argparse.ArgumentParser(description='Restructure MOOC CSV.')
parser.add_argument('input_csv', type=open)
args = parser.parse_args()
data_in = {}
with args.input_csv as csvin:
reader = csv.reader(csvin)
next(reader) # skip titles
for cid, author_id, parent_cid in reader:
data_in[cid] = (author_id, parent_cid.strip())
data_out = collections.defaultdict(int)
for source_author_id, parent_cid in data_in.values():
if len(parent_cid) > 0:
data_out[(source_author_id, data_in[parent_cid][0])] += 1
with open('output.csv', 'w') as csvout:
writer = csv.writer(csvout)
writer.writerow(['source', 'target', 'weight'])
for k, v in data_out.items():
writer.writerow(list(k) + [v])
|
import csv
import collections
import argparse
parser = argparse.ArgumentParser(description='Restructure MOOC CSV.')
parser.add_argument('input_csv', type=open)
args = parser.parse_args()
data_in = {}
with args.input_csv as csvin:
reader = csv.reader(csvin)
next(reader) # skip titles
for cid, author_id, parent_cid, step, txt, *x in reader:
data_in[cid] = (author_id, parent_cid.strip(), txt)
data_out = {}
for source_author_id, parent_cid, txt in data_in.values():
if len(parent_cid) > 0:
k = (source_author_id, data_in[parent_cid][0])
try:
v = data_out[k]
v[0] += 1
v[1] += txt
except KeyError:
data_out[k] = [1, txt]
with open('output.csv', 'w') as csvout:
writer = csv.writer(csvout)
writer.writerow(['source', 'target', 'weight', 'comments'])
writer.writerows(list(k) + v for k, v in data_out.items())
|
Add text of comments on the end of the output
|
Add text of comments on the end of the output
|
Python
|
mit
|
tlocke/mooc
|
import csv
import collections
import argparse
parser = argparse.ArgumentParser(description='Restructure MOOC CSV.')
parser.add_argument('input_csv', type=open)
args = parser.parse_args()
data_in = {}
with args.input_csv as csvin:
reader = csv.reader(csvin)
next(reader) # skip titles
for cid, author_id, parent_cid in reader:
data_in[cid] = (author_id, parent_cid.strip())
data_out = collections.defaultdict(int)
for source_author_id, parent_cid in data_in.values():
if len(parent_cid) > 0:
data_out[(source_author_id, data_in[parent_cid][0])] += 1
with open('output.csv', 'w') as csvout:
writer = csv.writer(csvout)
writer.writerow(['source', 'target', 'weight'])
for k, v in data_out.items():
writer.writerow(list(k) + [v])
Add text of comments on the end of the output
|
import csv
import collections
import argparse
parser = argparse.ArgumentParser(description='Restructure MOOC CSV.')
parser.add_argument('input_csv', type=open)
args = parser.parse_args()
data_in = {}
with args.input_csv as csvin:
reader = csv.reader(csvin)
next(reader) # skip titles
for cid, author_id, parent_cid, step, txt, *x in reader:
data_in[cid] = (author_id, parent_cid.strip(), txt)
data_out = {}
for source_author_id, parent_cid, txt in data_in.values():
if len(parent_cid) > 0:
k = (source_author_id, data_in[parent_cid][0])
try:
v = data_out[k]
v[0] += 1
v[1] += txt
except KeyError:
data_out[k] = [1, txt]
with open('output.csv', 'w') as csvout:
writer = csv.writer(csvout)
writer.writerow(['source', 'target', 'weight', 'comments'])
writer.writerows(list(k) + v for k, v in data_out.items())
|
<commit_before>import csv
import collections
import argparse
parser = argparse.ArgumentParser(description='Restructure MOOC CSV.')
parser.add_argument('input_csv', type=open)
args = parser.parse_args()
data_in = {}
with args.input_csv as csvin:
reader = csv.reader(csvin)
next(reader) # skip titles
for cid, author_id, parent_cid in reader:
data_in[cid] = (author_id, parent_cid.strip())
data_out = collections.defaultdict(int)
for source_author_id, parent_cid in data_in.values():
if len(parent_cid) > 0:
data_out[(source_author_id, data_in[parent_cid][0])] += 1
with open('output.csv', 'w') as csvout:
writer = csv.writer(csvout)
writer.writerow(['source', 'target', 'weight'])
for k, v in data_out.items():
writer.writerow(list(k) + [v])
<commit_msg>Add text of comments on the end of the output<commit_after>
|
import csv
import collections
import argparse
parser = argparse.ArgumentParser(description='Restructure MOOC CSV.')
parser.add_argument('input_csv', type=open)
args = parser.parse_args()
data_in = {}
with args.input_csv as csvin:
reader = csv.reader(csvin)
next(reader) # skip titles
for cid, author_id, parent_cid, step, txt, *x in reader:
data_in[cid] = (author_id, parent_cid.strip(), txt)
data_out = {}
for source_author_id, parent_cid, txt in data_in.values():
if len(parent_cid) > 0:
k = (source_author_id, data_in[parent_cid][0])
try:
v = data_out[k]
v[0] += 1
v[1] += txt
except KeyError:
data_out[k] = [1, txt]
with open('output.csv', 'w') as csvout:
writer = csv.writer(csvout)
writer.writerow(['source', 'target', 'weight', 'comments'])
writer.writerows(list(k) + v for k, v in data_out.items())
|
import csv
import collections
import argparse
parser = argparse.ArgumentParser(description='Restructure MOOC CSV.')
parser.add_argument('input_csv', type=open)
args = parser.parse_args()
data_in = {}
with args.input_csv as csvin:
reader = csv.reader(csvin)
next(reader) # skip titles
for cid, author_id, parent_cid in reader:
data_in[cid] = (author_id, parent_cid.strip())
data_out = collections.defaultdict(int)
for source_author_id, parent_cid in data_in.values():
if len(parent_cid) > 0:
data_out[(source_author_id, data_in[parent_cid][0])] += 1
with open('output.csv', 'w') as csvout:
writer = csv.writer(csvout)
writer.writerow(['source', 'target', 'weight'])
for k, v in data_out.items():
writer.writerow(list(k) + [v])
Add text of comments on the end of the outputimport csv
import collections
import argparse
parser = argparse.ArgumentParser(description='Restructure MOOC CSV.')
parser.add_argument('input_csv', type=open)
args = parser.parse_args()
data_in = {}
with args.input_csv as csvin:
reader = csv.reader(csvin)
next(reader) # skip titles
for cid, author_id, parent_cid, step, txt, *x in reader:
data_in[cid] = (author_id, parent_cid.strip(), txt)
data_out = {}
for source_author_id, parent_cid, txt in data_in.values():
if len(parent_cid) > 0:
k = (source_author_id, data_in[parent_cid][0])
try:
v = data_out[k]
v[0] += 1
v[1] += txt
except KeyError:
data_out[k] = [1, txt]
with open('output.csv', 'w') as csvout:
writer = csv.writer(csvout)
writer.writerow(['source', 'target', 'weight', 'comments'])
writer.writerows(list(k) + v for k, v in data_out.items())
|
<commit_before>import csv
import collections
import argparse
parser = argparse.ArgumentParser(description='Restructure MOOC CSV.')
parser.add_argument('input_csv', type=open)
args = parser.parse_args()
data_in = {}
with args.input_csv as csvin:
reader = csv.reader(csvin)
next(reader) # skip titles
for cid, author_id, parent_cid in reader:
data_in[cid] = (author_id, parent_cid.strip())
data_out = collections.defaultdict(int)
for source_author_id, parent_cid in data_in.values():
if len(parent_cid) > 0:
data_out[(source_author_id, data_in[parent_cid][0])] += 1
with open('output.csv', 'w') as csvout:
writer = csv.writer(csvout)
writer.writerow(['source', 'target', 'weight'])
for k, v in data_out.items():
writer.writerow(list(k) + [v])
<commit_msg>Add text of comments on the end of the output<commit_after>import csv
import collections
import argparse
parser = argparse.ArgumentParser(description='Restructure MOOC CSV.')
parser.add_argument('input_csv', type=open)
args = parser.parse_args()
data_in = {}
with args.input_csv as csvin:
reader = csv.reader(csvin)
next(reader) # skip titles
for cid, author_id, parent_cid, step, txt, *x in reader:
data_in[cid] = (author_id, parent_cid.strip(), txt)
data_out = {}
for source_author_id, parent_cid, txt in data_in.values():
if len(parent_cid) > 0:
k = (source_author_id, data_in[parent_cid][0])
try:
v = data_out[k]
v[0] += 1
v[1] += txt
except KeyError:
data_out[k] = [1, txt]
with open('output.csv', 'w') as csvout:
writer = csv.writer(csvout)
writer.writerow(['source', 'target', 'weight', 'comments'])
writer.writerows(list(k) + v for k, v in data_out.items())
|
bc628f58327a9b325f7ce1e3f8e56946d289aa66
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='bugsy',
version='0.4.1',
description='A library for interacting Bugzilla Native REST API',
author='David Burns',
author_email='david.burns at theautomatedtester dot co dot uk',
url='http://oss.theautomatedtester.co.uk/bugzilla',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python'],
packages = find_packages(),
install_requires=['requests>=1.1.0'],
)
|
from setuptools import setup, find_packages
setup(name='bugsy',
version='0.4.1',
description='A library for interacting Bugzilla Native REST API',
author='David Burns',
author_email='david.burns at theautomatedtester dot co dot uk',
url='https://github.com/AutomatedTester/Bugsy',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python'],
packages = find_packages(),
install_requires=['requests>=1.1.0'],
)
|
Make the Bugsy PyPI page link to GitHub
|
Make the Bugsy PyPI page link to GitHub
Since there is currently no mention of the repo there.
Signed-off-by: AutomatedTester <3d61f6450d7e43c8b567795ed24e9858346487a0@mozilla.com>
|
Python
|
apache-2.0
|
AutomatedTester/Bugsy
|
from setuptools import setup, find_packages
setup(name='bugsy',
version='0.4.1',
description='A library for interacting Bugzilla Native REST API',
author='David Burns',
author_email='david.burns at theautomatedtester dot co dot uk',
url='http://oss.theautomatedtester.co.uk/bugzilla',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python'],
packages = find_packages(),
install_requires=['requests>=1.1.0'],
)
Make the Bugsy PyPI page link to GitHub
Since there is currently no mention of the repo there.
Signed-off-by: AutomatedTester <3d61f6450d7e43c8b567795ed24e9858346487a0@mozilla.com>
|
from setuptools import setup, find_packages
setup(name='bugsy',
version='0.4.1',
description='A library for interacting Bugzilla Native REST API',
author='David Burns',
author_email='david.burns at theautomatedtester dot co dot uk',
url='https://github.com/AutomatedTester/Bugsy',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python'],
packages = find_packages(),
install_requires=['requests>=1.1.0'],
)
|
<commit_before>from setuptools import setup, find_packages
setup(name='bugsy',
version='0.4.1',
description='A library for interacting Bugzilla Native REST API',
author='David Burns',
author_email='david.burns at theautomatedtester dot co dot uk',
url='http://oss.theautomatedtester.co.uk/bugzilla',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python'],
packages = find_packages(),
install_requires=['requests>=1.1.0'],
)
<commit_msg>Make the Bugsy PyPI page link to GitHub
Since there is currently no mention of the repo there.
Signed-off-by: AutomatedTester <3d61f6450d7e43c8b567795ed24e9858346487a0@mozilla.com><commit_after>
|
from setuptools import setup, find_packages
setup(name='bugsy',
version='0.4.1',
description='A library for interacting Bugzilla Native REST API',
author='David Burns',
author_email='david.burns at theautomatedtester dot co dot uk',
url='https://github.com/AutomatedTester/Bugsy',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python'],
packages = find_packages(),
install_requires=['requests>=1.1.0'],
)
|
from setuptools import setup, find_packages
setup(name='bugsy',
version='0.4.1',
description='A library for interacting Bugzilla Native REST API',
author='David Burns',
author_email='david.burns at theautomatedtester dot co dot uk',
url='http://oss.theautomatedtester.co.uk/bugzilla',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python'],
packages = find_packages(),
install_requires=['requests>=1.1.0'],
)
Make the Bugsy PyPI page link to GitHub
Since there is currently no mention of the repo there.
Signed-off-by: AutomatedTester <3d61f6450d7e43c8b567795ed24e9858346487a0@mozilla.com>from setuptools import setup, find_packages
setup(name='bugsy',
version='0.4.1',
description='A library for interacting Bugzilla Native REST API',
author='David Burns',
author_email='david.burns at theautomatedtester dot co dot uk',
url='https://github.com/AutomatedTester/Bugsy',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python'],
packages = find_packages(),
install_requires=['requests>=1.1.0'],
)
|
<commit_before>from setuptools import setup, find_packages
setup(name='bugsy',
version='0.4.1',
description='A library for interacting Bugzilla Native REST API',
author='David Burns',
author_email='david.burns at theautomatedtester dot co dot uk',
url='http://oss.theautomatedtester.co.uk/bugzilla',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python'],
packages = find_packages(),
install_requires=['requests>=1.1.0'],
)
<commit_msg>Make the Bugsy PyPI page link to GitHub
Since there is currently no mention of the repo there.
Signed-off-by: AutomatedTester <3d61f6450d7e43c8b567795ed24e9858346487a0@mozilla.com><commit_after>from setuptools import setup, find_packages
setup(name='bugsy',
version='0.4.1',
description='A library for interacting Bugzilla Native REST API',
author='David Burns',
author_email='david.burns at theautomatedtester dot co dot uk',
url='https://github.com/AutomatedTester/Bugsy',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Libraries',
'Programming Language :: Python'],
packages = find_packages(),
install_requires=['requests>=1.1.0'],
)
|
0075942e2900f58f2f8bd82d0d71b49e08665123
|
openfisca_france_indirect_taxation/tests/base.py
|
openfisca_france_indirect_taxation/tests/base.py
|
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.tools import assert_near
from .. import init_country
__all__ = [
'assert_near',
'tax_benefit_system',
'TaxBenefitSystem',
]
TaxBenefitSystem = init_country()
tax_benefit_system = TaxBenefitSystem()
|
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.tools import assert_near
from .. import init_country
__all__ = [
'assert_near',
'tax_benefit_system',
'TaxBenefitSystem',
]
TaxBenefitSystem = init_country()
tax_benefit_system = TaxBenefitSystem()
tax_benefit_system.prefill_cache()
|
Add prefill_cache to make test_categorie_fiscale pass
|
Add prefill_cache to make test_categorie_fiscale pass
|
Python
|
agpl-3.0
|
benjello/openfisca-france-indirect-taxation,openfisca/openfisca-france-indirect-taxation,antoinearnoud/openfisca-france-indirect-taxation,thomasdouenne/openfisca-france-indirect-taxation
|
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.tools import assert_near
from .. import init_country
__all__ = [
'assert_near',
'tax_benefit_system',
'TaxBenefitSystem',
]
TaxBenefitSystem = init_country()
tax_benefit_system = TaxBenefitSystem()
Add prefill_cache to make test_categorie_fiscale pass
|
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.tools import assert_near
from .. import init_country
__all__ = [
'assert_near',
'tax_benefit_system',
'TaxBenefitSystem',
]
TaxBenefitSystem = init_country()
tax_benefit_system = TaxBenefitSystem()
tax_benefit_system.prefill_cache()
|
<commit_before># -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.tools import assert_near
from .. import init_country
__all__ = [
'assert_near',
'tax_benefit_system',
'TaxBenefitSystem',
]
TaxBenefitSystem = init_country()
tax_benefit_system = TaxBenefitSystem()
<commit_msg>Add prefill_cache to make test_categorie_fiscale pass<commit_after>
|
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.tools import assert_near
from .. import init_country
__all__ = [
'assert_near',
'tax_benefit_system',
'TaxBenefitSystem',
]
TaxBenefitSystem = init_country()
tax_benefit_system = TaxBenefitSystem()
tax_benefit_system.prefill_cache()
|
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.tools import assert_near
from .. import init_country
__all__ = [
'assert_near',
'tax_benefit_system',
'TaxBenefitSystem',
]
TaxBenefitSystem = init_country()
tax_benefit_system = TaxBenefitSystem()
Add prefill_cache to make test_categorie_fiscale pass# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.tools import assert_near
from .. import init_country
__all__ = [
'assert_near',
'tax_benefit_system',
'TaxBenefitSystem',
]
TaxBenefitSystem = init_country()
tax_benefit_system = TaxBenefitSystem()
tax_benefit_system.prefill_cache()
|
<commit_before># -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.tools import assert_near
from .. import init_country
__all__ = [
'assert_near',
'tax_benefit_system',
'TaxBenefitSystem',
]
TaxBenefitSystem = init_country()
tax_benefit_system = TaxBenefitSystem()
<commit_msg>Add prefill_cache to make test_categorie_fiscale pass<commit_after># -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_core.tools import assert_near
from .. import init_country
__all__ = [
'assert_near',
'tax_benefit_system',
'TaxBenefitSystem',
]
TaxBenefitSystem = init_country()
tax_benefit_system = TaxBenefitSystem()
tax_benefit_system.prefill_cache()
|
62ab9336545305509c24efed38660ee26bbe7c0f
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.9',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.10',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
Update the PyPI version to 7.0.10.
|
Update the PyPI version to 7.0.10.
|
Python
|
mit
|
Doist/todoist-python
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.9',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 7.0.10.
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.10',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.9',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 7.0.10.<commit_after>
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.10',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.9',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Update the PyPI version to 7.0.10.# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.10',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.9',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Update the PyPI version to 7.0.10.<commit_after># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='7.0.10',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
16b5b60d06e7979baaa0428b489af42ab6f64a4d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""google-translate package setup file."""
from setuptools import setup
from google_translate import __version__
setup(
version = __version__,
license = "Unlicense",
name = "doodle-translate",
author_email = "ytubedlg@gmail.com",
author = "Sotiris Papadopoulos",
url = "https://github.com/MrS0m30n3/google-translate",
description = "Small Python library to translate text for free using the Google translate.",
packages = ["google_translate"],
scripts = ["bin/google-translate"],
install_requires = ["twodict", "mock"],
package_data = {
"google_translate": ["data/languages"]
},
classifiers = [
"Intended Audience :: Developers",
"License :: Public Domain",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules"
],
keywords = "google translate language free"
)
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""google-translate package setup file."""
from setuptools import setup
from google_translate import (
__version__,
__license__
)
setup(
version = __version__,
license = __license__,
name = "doodle-translate",
author_email = "ytubedlg@gmail.com",
author = "Sotiris Papadopoulos",
url = "https://github.com/MrS0m30n3/google-translate",
description = "Small Python library to translate text for free using the Google translate.",
packages = ["google_translate"],
scripts = ["bin/google-translate"],
install_requires = ["twodict", "mock"],
package_data = {
"google_translate": ["data/languages"]
},
classifiers = [
"Intended Audience :: Developers",
"License :: Public Domain",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules"
],
keywords = "google translate language free"
)
|
Use the __license__ from the package __init__
|
Use the __license__ from the package __init__
|
Python
|
unlicense
|
MrS0m30n3/google-translate,MrS0m30n3/google-translate
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""google-translate package setup file."""
from setuptools import setup
from google_translate import __version__
setup(
version = __version__,
license = "Unlicense",
name = "doodle-translate",
author_email = "ytubedlg@gmail.com",
author = "Sotiris Papadopoulos",
url = "https://github.com/MrS0m30n3/google-translate",
description = "Small Python library to translate text for free using the Google translate.",
packages = ["google_translate"],
scripts = ["bin/google-translate"],
install_requires = ["twodict", "mock"],
package_data = {
"google_translate": ["data/languages"]
},
classifiers = [
"Intended Audience :: Developers",
"License :: Public Domain",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules"
],
keywords = "google translate language free"
)
Use the __license__ from the package __init__
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""google-translate package setup file."""
from setuptools import setup
from google_translate import (
__version__,
__license__
)
setup(
version = __version__,
license = __license__,
name = "doodle-translate",
author_email = "ytubedlg@gmail.com",
author = "Sotiris Papadopoulos",
url = "https://github.com/MrS0m30n3/google-translate",
description = "Small Python library to translate text for free using the Google translate.",
packages = ["google_translate"],
scripts = ["bin/google-translate"],
install_requires = ["twodict", "mock"],
package_data = {
"google_translate": ["data/languages"]
},
classifiers = [
"Intended Audience :: Developers",
"License :: Public Domain",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules"
],
keywords = "google translate language free"
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""google-translate package setup file."""
from setuptools import setup
from google_translate import __version__
setup(
version = __version__,
license = "Unlicense",
name = "doodle-translate",
author_email = "ytubedlg@gmail.com",
author = "Sotiris Papadopoulos",
url = "https://github.com/MrS0m30n3/google-translate",
description = "Small Python library to translate text for free using the Google translate.",
packages = ["google_translate"],
scripts = ["bin/google-translate"],
install_requires = ["twodict", "mock"],
package_data = {
"google_translate": ["data/languages"]
},
classifiers = [
"Intended Audience :: Developers",
"License :: Public Domain",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules"
],
keywords = "google translate language free"
)
<commit_msg>Use the __license__ from the package __init__<commit_after>
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""google-translate package setup file."""
from setuptools import setup
from google_translate import (
__version__,
__license__
)
setup(
version = __version__,
license = __license__,
name = "doodle-translate",
author_email = "ytubedlg@gmail.com",
author = "Sotiris Papadopoulos",
url = "https://github.com/MrS0m30n3/google-translate",
description = "Small Python library to translate text for free using the Google translate.",
packages = ["google_translate"],
scripts = ["bin/google-translate"],
install_requires = ["twodict", "mock"],
package_data = {
"google_translate": ["data/languages"]
},
classifiers = [
"Intended Audience :: Developers",
"License :: Public Domain",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules"
],
keywords = "google translate language free"
)
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""google-translate package setup file."""
from setuptools import setup
from google_translate import __version__
setup(
version = __version__,
license = "Unlicense",
name = "doodle-translate",
author_email = "ytubedlg@gmail.com",
author = "Sotiris Papadopoulos",
url = "https://github.com/MrS0m30n3/google-translate",
description = "Small Python library to translate text for free using the Google translate.",
packages = ["google_translate"],
scripts = ["bin/google-translate"],
install_requires = ["twodict", "mock"],
package_data = {
"google_translate": ["data/languages"]
},
classifiers = [
"Intended Audience :: Developers",
"License :: Public Domain",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules"
],
keywords = "google translate language free"
)
Use the __license__ from the package __init__#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""google-translate package setup file."""
from setuptools import setup
from google_translate import (
__version__,
__license__
)
setup(
version = __version__,
license = __license__,
name = "doodle-translate",
author_email = "ytubedlg@gmail.com",
author = "Sotiris Papadopoulos",
url = "https://github.com/MrS0m30n3/google-translate",
description = "Small Python library to translate text for free using the Google translate.",
packages = ["google_translate"],
scripts = ["bin/google-translate"],
install_requires = ["twodict", "mock"],
package_data = {
"google_translate": ["data/languages"]
},
classifiers = [
"Intended Audience :: Developers",
"License :: Public Domain",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules"
],
keywords = "google translate language free"
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""google-translate package setup file."""
from setuptools import setup
from google_translate import __version__
setup(
version = __version__,
license = "Unlicense",
name = "doodle-translate",
author_email = "ytubedlg@gmail.com",
author = "Sotiris Papadopoulos",
url = "https://github.com/MrS0m30n3/google-translate",
description = "Small Python library to translate text for free using the Google translate.",
packages = ["google_translate"],
scripts = ["bin/google-translate"],
install_requires = ["twodict", "mock"],
package_data = {
"google_translate": ["data/languages"]
},
classifiers = [
"Intended Audience :: Developers",
"License :: Public Domain",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules"
],
keywords = "google translate language free"
)
<commit_msg>Use the __license__ from the package __init__<commit_after>#!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""google-translate package setup file."""
from setuptools import setup
from google_translate import (
__version__,
__license__
)
setup(
version = __version__,
license = __license__,
name = "doodle-translate",
author_email = "ytubedlg@gmail.com",
author = "Sotiris Papadopoulos",
url = "https://github.com/MrS0m30n3/google-translate",
description = "Small Python library to translate text for free using the Google translate.",
packages = ["google_translate"],
scripts = ["bin/google-translate"],
install_requires = ["twodict", "mock"],
package_data = {
"google_translate": ["data/languages"]
},
classifiers = [
"Intended Audience :: Developers",
"License :: Public Domain",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Libraries :: Python Modules"
],
keywords = "google translate language free"
)
|
a34315818fd1f6d270b3226892e3b9005dbfbc16
|
setup.py
|
setup.py
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info <= (2, 6):
py26_dependency = ["argparse >= 1.2.1", "ordereddict >= 1.1"]
setup(
name='dataset',
version='0.4.0',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.9.1',
'alembic >= 0.6.2',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
test_suite='test',
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info[:2] <= (2, 6):
py26_dependency = ["argparse >= 1.2.1", "ordereddict >= 1.1"]
setup(
name='dataset',
version='0.4.0',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.9.1',
'alembic >= 0.6.2',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
test_suite='test',
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
Fix bad python 2.6 detection
|
Fix bad python 2.6 detection
|
Python
|
mit
|
vguzmanp/dataset,stefanw/dataset,twds/dataset,pudo/dataset,saimn/dataset,askebos/dataset,reubano/dataset
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info <= (2, 6):
py26_dependency = ["argparse >= 1.2.1", "ordereddict >= 1.1"]
setup(
name='dataset',
version='0.4.0',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.9.1',
'alembic >= 0.6.2',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
test_suite='test',
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
Fix bad python 2.6 detection
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info[:2] <= (2, 6):
py26_dependency = ["argparse >= 1.2.1", "ordereddict >= 1.1"]
setup(
name='dataset',
version='0.4.0',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.9.1',
'alembic >= 0.6.2',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
test_suite='test',
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
<commit_before>import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info <= (2, 6):
py26_dependency = ["argparse >= 1.2.1", "ordereddict >= 1.1"]
setup(
name='dataset',
version='0.4.0',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.9.1',
'alembic >= 0.6.2',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
test_suite='test',
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
<commit_msg>Fix bad python 2.6 detection<commit_after>
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info[:2] <= (2, 6):
py26_dependency = ["argparse >= 1.2.1", "ordereddict >= 1.1"]
setup(
name='dataset',
version='0.4.0',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.9.1',
'alembic >= 0.6.2',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
test_suite='test',
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info <= (2, 6):
py26_dependency = ["argparse >= 1.2.1", "ordereddict >= 1.1"]
setup(
name='dataset',
version='0.4.0',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.9.1',
'alembic >= 0.6.2',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
test_suite='test',
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
Fix bad python 2.6 detectionimport sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info[:2] <= (2, 6):
py26_dependency = ["argparse >= 1.2.1", "ordereddict >= 1.1"]
setup(
name='dataset',
version='0.4.0',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.9.1',
'alembic >= 0.6.2',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
test_suite='test',
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
<commit_before>import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info <= (2, 6):
py26_dependency = ["argparse >= 1.2.1", "ordereddict >= 1.1"]
setup(
name='dataset',
version='0.4.0',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.9.1',
'alembic >= 0.6.2',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
test_suite='test',
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
<commit_msg>Fix bad python 2.6 detection<commit_after>import sys
from setuptools import setup, find_packages
py26_dependency = []
if sys.version_info[:2] <= (2, 6):
py26_dependency = ["argparse >= 1.2.1", "ordereddict >= 1.1"]
setup(
name='dataset',
version='0.4.0',
description="Toolkit for Python-based data processing.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3'
],
keywords='sql sqlalchemy etl loading utility',
author='Friedrich Lindenberg, Gregor Aisch',
author_email='info@okfn.org',
url='http://github.com/pudo/dataset',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'test']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
'sqlalchemy >= 0.9.1',
'alembic >= 0.6.2',
'python-slugify >= 0.0.6',
"PyYAML >= 3.10"
] + py26_dependency,
tests_require=[],
test_suite='test',
entry_points={
'console_scripts': [
'datafreeze = dataset.freeze.app:main',
]
}
)
|
e1a5ed9bb7d9a8f397396c2ae4fdf50a6d1ba89c
|
setup.py
|
setup.py
|
#! /usr/bin/env python
from setuptools import setup, find_packages
setup(name='spiralgalaxygame',
description='Spiral Galaxy Game',
url='https://github.com/nejucomo/spiralgalaxygame',
license='GPLv3',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
packages=find_packages(),
install_requires=[
'twisted >= 13.1',
],
entry_points = {
'console_scripts': [
'sgg-httpd = sgg.app.httpd:main',
],
},
package_data = {'sgg': ['web/static/*']},
)
|
#! /usr/bin/env python
import os
import glob
from setuptools import setup, find_packages
setup(name='spiralgalaxygame',
description='Spiral Galaxy Game',
url='https://github.com/nejucomo/spiralgalaxygame',
license='GPLv3',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
packages=find_packages(),
install_requires=[
'twisted >= 13.1',
],
entry_points = {
'console_scripts': [
'sgg-%s = sgg.app.%s:main' % (n, n)
for n in [
os.path.basename(n)[:-3]
for n in glob.glob('sgg/app/*.py')
if not n.endswith('__init__.py')
]
],
},
package_data = {'sgg': ['web/static/*']},
)
|
Automate registration of application main functions: every module in sgg.app must have a main() and is a named console_script.
|
Automate registration of application main functions: every module in sgg.app must have a main() and is a named console_script.
|
Python
|
agpl-3.0
|
nejucomo/sgg,nejucomo/sgg,nejucomo/sgg
|
#! /usr/bin/env python
from setuptools import setup, find_packages
setup(name='spiralgalaxygame',
description='Spiral Galaxy Game',
url='https://github.com/nejucomo/spiralgalaxygame',
license='GPLv3',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
packages=find_packages(),
install_requires=[
'twisted >= 13.1',
],
entry_points = {
'console_scripts': [
'sgg-httpd = sgg.app.httpd:main',
],
},
package_data = {'sgg': ['web/static/*']},
)
Automate registration of application main functions: every module in sgg.app must have a main() and is a named console_script.
|
#! /usr/bin/env python
import os
import glob
from setuptools import setup, find_packages
setup(name='spiralgalaxygame',
description='Spiral Galaxy Game',
url='https://github.com/nejucomo/spiralgalaxygame',
license='GPLv3',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
packages=find_packages(),
install_requires=[
'twisted >= 13.1',
],
entry_points = {
'console_scripts': [
'sgg-%s = sgg.app.%s:main' % (n, n)
for n in [
os.path.basename(n)[:-3]
for n in glob.glob('sgg/app/*.py')
if not n.endswith('__init__.py')
]
],
},
package_data = {'sgg': ['web/static/*']},
)
|
<commit_before>#! /usr/bin/env python
from setuptools import setup, find_packages
setup(name='spiralgalaxygame',
description='Spiral Galaxy Game',
url='https://github.com/nejucomo/spiralgalaxygame',
license='GPLv3',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
packages=find_packages(),
install_requires=[
'twisted >= 13.1',
],
entry_points = {
'console_scripts': [
'sgg-httpd = sgg.app.httpd:main',
],
},
package_data = {'sgg': ['web/static/*']},
)
<commit_msg>Automate registration of application main functions: every module in sgg.app must have a main() and is a named console_script.<commit_after>
|
#! /usr/bin/env python
import os
import glob
from setuptools import setup, find_packages
setup(name='spiralgalaxygame',
description='Spiral Galaxy Game',
url='https://github.com/nejucomo/spiralgalaxygame',
license='GPLv3',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
packages=find_packages(),
install_requires=[
'twisted >= 13.1',
],
entry_points = {
'console_scripts': [
'sgg-%s = sgg.app.%s:main' % (n, n)
for n in [
os.path.basename(n)[:-3]
for n in glob.glob('sgg/app/*.py')
if not n.endswith('__init__.py')
]
],
},
package_data = {'sgg': ['web/static/*']},
)
|
#! /usr/bin/env python
from setuptools import setup, find_packages
setup(name='spiralgalaxygame',
description='Spiral Galaxy Game',
url='https://github.com/nejucomo/spiralgalaxygame',
license='GPLv3',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
packages=find_packages(),
install_requires=[
'twisted >= 13.1',
],
entry_points = {
'console_scripts': [
'sgg-httpd = sgg.app.httpd:main',
],
},
package_data = {'sgg': ['web/static/*']},
)
Automate registration of application main functions: every module in sgg.app must have a main() and is a named console_script.#! /usr/bin/env python
import os
import glob
from setuptools import setup, find_packages
setup(name='spiralgalaxygame',
description='Spiral Galaxy Game',
url='https://github.com/nejucomo/spiralgalaxygame',
license='GPLv3',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
packages=find_packages(),
install_requires=[
'twisted >= 13.1',
],
entry_points = {
'console_scripts': [
'sgg-%s = sgg.app.%s:main' % (n, n)
for n in [
os.path.basename(n)[:-3]
for n in glob.glob('sgg/app/*.py')
if not n.endswith('__init__.py')
]
],
},
package_data = {'sgg': ['web/static/*']},
)
|
<commit_before>#! /usr/bin/env python
from setuptools import setup, find_packages
setup(name='spiralgalaxygame',
description='Spiral Galaxy Game',
url='https://github.com/nejucomo/spiralgalaxygame',
license='GPLv3',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
packages=find_packages(),
install_requires=[
'twisted >= 13.1',
],
entry_points = {
'console_scripts': [
'sgg-httpd = sgg.app.httpd:main',
],
},
package_data = {'sgg': ['web/static/*']},
)
<commit_msg>Automate registration of application main functions: every module in sgg.app must have a main() and is a named console_script.<commit_after>#! /usr/bin/env python
import os
import glob
from setuptools import setup, find_packages
setup(name='spiralgalaxygame',
description='Spiral Galaxy Game',
url='https://github.com/nejucomo/spiralgalaxygame',
license='GPLv3',
version='0.1.dev0',
author='Nathan Wilcox',
author_email='nejucomo@gmail.com',
packages=find_packages(),
install_requires=[
'twisted >= 13.1',
],
entry_points = {
'console_scripts': [
'sgg-%s = sgg.app.%s:main' % (n, n)
for n in [
os.path.basename(n)[:-3]
for n in glob.glob('sgg/app/*.py')
if not n.endswith('__init__.py')
]
],
},
package_data = {'sgg': ['web/static/*']},
)
|
03dee283f0cdbf917d2ff3cbee3fbe45e0b0e430
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='aweber_api',
version='1.1.3',
packages=find_packages(exclude=['tests']),
url='https://github.com/aweber/AWeber-API-Python-Library',
install_requires = [
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require = [
'dingus',
'coverage',
],
setup_requires = [
'nose',
],
include_package_data=True
)
|
from setuptools import setup, find_packages
from sys import version
if version < '2.2.3':
from distutils.dist import DistributionMetadata
DistributionMetadata.classifiers = None
DistributionMetadata.download_url = None
setup(
name='aweber_api',
version='1.1.3',
author='AWeber Dev Team',
author_email='api@aweber.com',
maintainer='AWeber API Team',
maintainer_email='api@aweber.com',
url='https://github.com/aweber/AWeber-API-Python-Library',
download_url='http://pypi.python.org/pypi/aweber_api',
description='The AWeber API Python Library allows you to quickly get up '
'and running with integrating access to the AWeber API into your '
'Python applications.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
packages=find_packages(exclude=['tests']),
install_requires=[
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require=[
'dingus',
'coverage',
],
setup_requires=[
'nose',
],
include_package_data=True
)
|
Update metadata used by pypi
|
Update metadata used by pypi
|
Python
|
bsd-3-clause
|
aweber/AWeber-API-Python-Library
|
from setuptools import setup, find_packages
setup(
name='aweber_api',
version='1.1.3',
packages=find_packages(exclude=['tests']),
url='https://github.com/aweber/AWeber-API-Python-Library',
install_requires = [
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require = [
'dingus',
'coverage',
],
setup_requires = [
'nose',
],
include_package_data=True
)
Update metadata used by pypi
|
from setuptools import setup, find_packages
from sys import version
if version < '2.2.3':
from distutils.dist import DistributionMetadata
DistributionMetadata.classifiers = None
DistributionMetadata.download_url = None
setup(
name='aweber_api',
version='1.1.3',
author='AWeber Dev Team',
author_email='api@aweber.com',
maintainer='AWeber API Team',
maintainer_email='api@aweber.com',
url='https://github.com/aweber/AWeber-API-Python-Library',
download_url='http://pypi.python.org/pypi/aweber_api',
description='The AWeber API Python Library allows you to quickly get up '
'and running with integrating access to the AWeber API into your '
'Python applications.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
packages=find_packages(exclude=['tests']),
install_requires=[
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require=[
'dingus',
'coverage',
],
setup_requires=[
'nose',
],
include_package_data=True
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='aweber_api',
version='1.1.3',
packages=find_packages(exclude=['tests']),
url='https://github.com/aweber/AWeber-API-Python-Library',
install_requires = [
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require = [
'dingus',
'coverage',
],
setup_requires = [
'nose',
],
include_package_data=True
)
<commit_msg>Update metadata used by pypi<commit_after>
|
from setuptools import setup, find_packages
from sys import version
if version < '2.2.3':
from distutils.dist import DistributionMetadata
DistributionMetadata.classifiers = None
DistributionMetadata.download_url = None
setup(
name='aweber_api',
version='1.1.3',
author='AWeber Dev Team',
author_email='api@aweber.com',
maintainer='AWeber API Team',
maintainer_email='api@aweber.com',
url='https://github.com/aweber/AWeber-API-Python-Library',
download_url='http://pypi.python.org/pypi/aweber_api',
description='The AWeber API Python Library allows you to quickly get up '
'and running with integrating access to the AWeber API into your '
'Python applications.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
packages=find_packages(exclude=['tests']),
install_requires=[
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require=[
'dingus',
'coverage',
],
setup_requires=[
'nose',
],
include_package_data=True
)
|
from setuptools import setup, find_packages
setup(
name='aweber_api',
version='1.1.3',
packages=find_packages(exclude=['tests']),
url='https://github.com/aweber/AWeber-API-Python-Library',
install_requires = [
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require = [
'dingus',
'coverage',
],
setup_requires = [
'nose',
],
include_package_data=True
)
Update metadata used by pypifrom setuptools import setup, find_packages
from sys import version
if version < '2.2.3':
from distutils.dist import DistributionMetadata
DistributionMetadata.classifiers = None
DistributionMetadata.download_url = None
setup(
name='aweber_api',
version='1.1.3',
author='AWeber Dev Team',
author_email='api@aweber.com',
maintainer='AWeber API Team',
maintainer_email='api@aweber.com',
url='https://github.com/aweber/AWeber-API-Python-Library',
download_url='http://pypi.python.org/pypi/aweber_api',
description='The AWeber API Python Library allows you to quickly get up '
'and running with integrating access to the AWeber API into your '
'Python applications.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
packages=find_packages(exclude=['tests']),
install_requires=[
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require=[
'dingus',
'coverage',
],
setup_requires=[
'nose',
],
include_package_data=True
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='aweber_api',
version='1.1.3',
packages=find_packages(exclude=['tests']),
url='https://github.com/aweber/AWeber-API-Python-Library',
install_requires = [
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require = [
'dingus',
'coverage',
],
setup_requires = [
'nose',
],
include_package_data=True
)
<commit_msg>Update metadata used by pypi<commit_after>from setuptools import setup, find_packages
from sys import version
if version < '2.2.3':
from distutils.dist import DistributionMetadata
DistributionMetadata.classifiers = None
DistributionMetadata.download_url = None
setup(
name='aweber_api',
version='1.1.3',
author='AWeber Dev Team',
author_email='api@aweber.com',
maintainer='AWeber API Team',
maintainer_email='api@aweber.com',
url='https://github.com/aweber/AWeber-API-Python-Library',
download_url='http://pypi.python.org/pypi/aweber_api',
description='The AWeber API Python Library allows you to quickly get up '
'and running with integrating access to the AWeber API into your '
'Python applications.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Web Environment',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: BSD License',
],
packages=find_packages(exclude=['tests']),
install_requires=[
'httplib2>=0.7.0',
'oauth2>=1.2',
],
tests_require=[
'dingus',
'coverage',
],
setup_requires=[
'nose',
],
include_package_data=True
)
|
003ddb7fe8577c6dcd41c6fe460199e027785f71
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='mcrypto',
version='0.1',
author='Pavel Brodsky',
author_email='mcouthon@gmail.com',
packages=[
'mcrypto',
'mcrypto.mcrypto_project',
'mcrypto.mcrypto_project.mcrypto_project',
'mcrypto.mcrypto_project.portfolio',
'mcrypto.mcrypto_project.portfolio.migrations',
'mcrypto.mcrypto_project.portfolio.views',
],
description='A cryptocurrency portfolio manager',
install_requires=[
'Django>=1.11.1',
],
)
|
from setuptools import setup
setup(
name='mcrypto',
version='0.1',
author='Pavel Brodsky',
author_email='mcouthon@gmail.com',
packages=[
'mcrypto',
'mcrypto.mcrypto_project',
'mcrypto.mcrypto_project.mcrypto_project',
'mcrypto.mcrypto_project.portfolio',
'mcrypto.mcrypto_project.portfolio.migrations',
'mcrypto.mcrypto_project.portfolio.views',
'mcrypto.mcrypto_project.portfolio.urls',
],
description='A cryptocurrency portfolio manager',
install_requires=[
'Django>=1.11.1',
],
)
|
Split `urls.py` to separate modules
|
Split `urls.py` to separate modules
|
Python
|
bsd-3-clause
|
mcouthon/mcrypto,mcouthon/mcrypto
|
from setuptools import setup
setup(
name='mcrypto',
version='0.1',
author='Pavel Brodsky',
author_email='mcouthon@gmail.com',
packages=[
'mcrypto',
'mcrypto.mcrypto_project',
'mcrypto.mcrypto_project.mcrypto_project',
'mcrypto.mcrypto_project.portfolio',
'mcrypto.mcrypto_project.portfolio.migrations',
'mcrypto.mcrypto_project.portfolio.views',
],
description='A cryptocurrency portfolio manager',
install_requires=[
'Django>=1.11.1',
],
)
Split `urls.py` to separate modules
|
from setuptools import setup
setup(
name='mcrypto',
version='0.1',
author='Pavel Brodsky',
author_email='mcouthon@gmail.com',
packages=[
'mcrypto',
'mcrypto.mcrypto_project',
'mcrypto.mcrypto_project.mcrypto_project',
'mcrypto.mcrypto_project.portfolio',
'mcrypto.mcrypto_project.portfolio.migrations',
'mcrypto.mcrypto_project.portfolio.views',
'mcrypto.mcrypto_project.portfolio.urls',
],
description='A cryptocurrency portfolio manager',
install_requires=[
'Django>=1.11.1',
],
)
|
<commit_before>from setuptools import setup
setup(
name='mcrypto',
version='0.1',
author='Pavel Brodsky',
author_email='mcouthon@gmail.com',
packages=[
'mcrypto',
'mcrypto.mcrypto_project',
'mcrypto.mcrypto_project.mcrypto_project',
'mcrypto.mcrypto_project.portfolio',
'mcrypto.mcrypto_project.portfolio.migrations',
'mcrypto.mcrypto_project.portfolio.views',
],
description='A cryptocurrency portfolio manager',
install_requires=[
'Django>=1.11.1',
],
)
<commit_msg>Split `urls.py` to separate modules<commit_after>
|
from setuptools import setup
setup(
name='mcrypto',
version='0.1',
author='Pavel Brodsky',
author_email='mcouthon@gmail.com',
packages=[
'mcrypto',
'mcrypto.mcrypto_project',
'mcrypto.mcrypto_project.mcrypto_project',
'mcrypto.mcrypto_project.portfolio',
'mcrypto.mcrypto_project.portfolio.migrations',
'mcrypto.mcrypto_project.portfolio.views',
'mcrypto.mcrypto_project.portfolio.urls',
],
description='A cryptocurrency portfolio manager',
install_requires=[
'Django>=1.11.1',
],
)
|
from setuptools import setup
setup(
name='mcrypto',
version='0.1',
author='Pavel Brodsky',
author_email='mcouthon@gmail.com',
packages=[
'mcrypto',
'mcrypto.mcrypto_project',
'mcrypto.mcrypto_project.mcrypto_project',
'mcrypto.mcrypto_project.portfolio',
'mcrypto.mcrypto_project.portfolio.migrations',
'mcrypto.mcrypto_project.portfolio.views',
],
description='A cryptocurrency portfolio manager',
install_requires=[
'Django>=1.11.1',
],
)
Split `urls.py` to separate modulesfrom setuptools import setup
setup(
name='mcrypto',
version='0.1',
author='Pavel Brodsky',
author_email='mcouthon@gmail.com',
packages=[
'mcrypto',
'mcrypto.mcrypto_project',
'mcrypto.mcrypto_project.mcrypto_project',
'mcrypto.mcrypto_project.portfolio',
'mcrypto.mcrypto_project.portfolio.migrations',
'mcrypto.mcrypto_project.portfolio.views',
'mcrypto.mcrypto_project.portfolio.urls',
],
description='A cryptocurrency portfolio manager',
install_requires=[
'Django>=1.11.1',
],
)
|
<commit_before>from setuptools import setup
setup(
name='mcrypto',
version='0.1',
author='Pavel Brodsky',
author_email='mcouthon@gmail.com',
packages=[
'mcrypto',
'mcrypto.mcrypto_project',
'mcrypto.mcrypto_project.mcrypto_project',
'mcrypto.mcrypto_project.portfolio',
'mcrypto.mcrypto_project.portfolio.migrations',
'mcrypto.mcrypto_project.portfolio.views',
],
description='A cryptocurrency portfolio manager',
install_requires=[
'Django>=1.11.1',
],
)
<commit_msg>Split `urls.py` to separate modules<commit_after>from setuptools import setup
setup(
name='mcrypto',
version='0.1',
author='Pavel Brodsky',
author_email='mcouthon@gmail.com',
packages=[
'mcrypto',
'mcrypto.mcrypto_project',
'mcrypto.mcrypto_project.mcrypto_project',
'mcrypto.mcrypto_project.portfolio',
'mcrypto.mcrypto_project.portfolio.migrations',
'mcrypto.mcrypto_project.portfolio.views',
'mcrypto.mcrypto_project.portfolio.urls',
],
description='A cryptocurrency portfolio manager',
install_requires=[
'Django>=1.11.1',
],
)
|
9b84808f44d9b67aafa9e10f13301ab4ed368f0f
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='optfunc-ysimonson',
version='0.1.1',
description='Generate commandline flags from function arguments.',
author='Simon Willison',
author_email='simon@lanyrd.com',
url='https://github.com/ysimonson/optfunc',
license='BSD',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2 :: Only',
'Environment :: Console',
'Development Status :: 4 - Beta'],
py_modules=['optfunc'])
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='optfunc-ysimonson',
version='0.1.2',
description='Generate commandline flags from function arguments.',
author='Simon Willison',
author_email='simon@lanyrd.com',
url='https://github.com/ysimonson/optfunc',
license='BSD',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2 :: Only',
'Environment :: Console',
'Development Status :: 4 - Beta'],
py_modules=['optfunc'])
|
Bump version for new pypi dist
|
Bump version for new pypi dist
|
Python
|
bsd-2-clause
|
ysimonson/optfn
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='optfunc-ysimonson',
version='0.1.1',
description='Generate commandline flags from function arguments.',
author='Simon Willison',
author_email='simon@lanyrd.com',
url='https://github.com/ysimonson/optfunc',
license='BSD',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2 :: Only',
'Environment :: Console',
'Development Status :: 4 - Beta'],
py_modules=['optfunc'])
Bump version for new pypi dist
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='optfunc-ysimonson',
version='0.1.2',
description='Generate commandline flags from function arguments.',
author='Simon Willison',
author_email='simon@lanyrd.com',
url='https://github.com/ysimonson/optfunc',
license='BSD',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2 :: Only',
'Environment :: Console',
'Development Status :: 4 - Beta'],
py_modules=['optfunc'])
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name='optfunc-ysimonson',
version='0.1.1',
description='Generate commandline flags from function arguments.',
author='Simon Willison',
author_email='simon@lanyrd.com',
url='https://github.com/ysimonson/optfunc',
license='BSD',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2 :: Only',
'Environment :: Console',
'Development Status :: 4 - Beta'],
py_modules=['optfunc'])
<commit_msg>Bump version for new pypi dist<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='optfunc-ysimonson',
version='0.1.2',
description='Generate commandline flags from function arguments.',
author='Simon Willison',
author_email='simon@lanyrd.com',
url='https://github.com/ysimonson/optfunc',
license='BSD',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2 :: Only',
'Environment :: Console',
'Development Status :: 4 - Beta'],
py_modules=['optfunc'])
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='optfunc-ysimonson',
version='0.1.1',
description='Generate commandline flags from function arguments.',
author='Simon Willison',
author_email='simon@lanyrd.com',
url='https://github.com/ysimonson/optfunc',
license='BSD',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2 :: Only',
'Environment :: Console',
'Development Status :: 4 - Beta'],
py_modules=['optfunc'])
Bump version for new pypi dist#!/usr/bin/env python
from distutils.core import setup
setup(name='optfunc-ysimonson',
version='0.1.2',
description='Generate commandline flags from function arguments.',
author='Simon Willison',
author_email='simon@lanyrd.com',
url='https://github.com/ysimonson/optfunc',
license='BSD',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2 :: Only',
'Environment :: Console',
'Development Status :: 4 - Beta'],
py_modules=['optfunc'])
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(name='optfunc-ysimonson',
version='0.1.1',
description='Generate commandline flags from function arguments.',
author='Simon Willison',
author_email='simon@lanyrd.com',
url='https://github.com/ysimonson/optfunc',
license='BSD',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2 :: Only',
'Environment :: Console',
'Development Status :: 4 - Beta'],
py_modules=['optfunc'])
<commit_msg>Bump version for new pypi dist<commit_after>#!/usr/bin/env python
from distutils.core import setup
setup(name='optfunc-ysimonson',
version='0.1.2',
description='Generate commandline flags from function arguments.',
author='Simon Willison',
author_email='simon@lanyrd.com',
url='https://github.com/ysimonson/optfunc',
license='BSD',
classifiers=['Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2 :: Only',
'Environment :: Console',
'Development Status :: 4 - Beta'],
py_modules=['optfunc'])
|
aa2a2a57030dec2e8b73b017de5f157aae0fb5e5
|
tests/qtgui/qpixmap_test.py
|
tests/qtgui/qpixmap_test.py
|
import unittest
from helper import UsesQApplication
from PySide.QtGui import QPixmap
from PySide.QtCore import QVariant
#Only test if is possible create a QPixmap from a QVariant
class QPixmapTest(UsesQApplication):
def testQVariantConstructor(self):
pixmap = QPixmap()
v = QVariant(pixmap)
pixmap_copy = QPixmap(v)
if __name__ == '__main__':
unittest.main()
|
import unittest
from helper import UsesQApplication
from PySide.QtGui import QPixmap
from PySide.QtCore import QVariant, QSize, QString
class QPixmapTest(UsesQApplication):
def testQVariantConstructor(self):
pixmap = QPixmap()
v = QVariant(pixmap)
pixmap_copy = QPixmap(v)
def testQSizeConstructor(self):
pixmap = QPixmap(QSize(10,20))
self.assert_(pixmap.size().height(), 20)
def testQStringConstructor(self):
pixmap = QPixmap(QString("Testing!"))
if __name__ == '__main__':
unittest.main()
|
Improve qpixmap test to support qstring and qsize arguments.
|
Improve qpixmap test to support qstring and qsize arguments.
Reviewed by Marcelo Lira <marcelo.lira@openbossa.org>
|
Python
|
lgpl-2.1
|
BadSingleton/pyside2,enthought/pyside,gbaty/pyside2,IronManMark20/pyside2,gbaty/pyside2,gbaty/pyside2,M4rtinK/pyside-android,PySide/PySide,pankajp/pyside,pankajp/pyside,PySide/PySide,M4rtinK/pyside-bb10,BadSingleton/pyside2,IronManMark20/pyside2,enthought/pyside,BadSingleton/pyside2,IronManMark20/pyside2,M4rtinK/pyside-android,M4rtinK/pyside-bb10,qtproject/pyside-pyside,gbaty/pyside2,M4rtinK/pyside-bb10,M4rtinK/pyside-android,qtproject/pyside-pyside,pankajp/pyside,IronManMark20/pyside2,RobinD42/pyside,enthought/pyside,PySide/PySide,BadSingleton/pyside2,M4rtinK/pyside-bb10,M4rtinK/pyside-android,M4rtinK/pyside-bb10,gbaty/pyside2,BadSingleton/pyside2,enthought/pyside,qtproject/pyside-pyside,RobinD42/pyside,RobinD42/pyside,RobinD42/pyside,M4rtinK/pyside-bb10,enthought/pyside,PySide/PySide,qtproject/pyside-pyside,RobinD42/pyside,M4rtinK/pyside-android,enthought/pyside,enthought/pyside,pankajp/pyside,RobinD42/pyside,qtproject/pyside-pyside,IronManMark20/pyside2,M4rtinK/pyside-android,PySide/PySide,RobinD42/pyside,pankajp/pyside
|
import unittest
from helper import UsesQApplication
from PySide.QtGui import QPixmap
from PySide.QtCore import QVariant
#Only test if is possible create a QPixmap from a QVariant
class QPixmapTest(UsesQApplication):
def testQVariantConstructor(self):
pixmap = QPixmap()
v = QVariant(pixmap)
pixmap_copy = QPixmap(v)
if __name__ == '__main__':
unittest.main()
Improve qpixmap test to support qstring and qsize arguments.
Reviewed by Marcelo Lira <marcelo.lira@openbossa.org>
|
import unittest
from helper import UsesQApplication
from PySide.QtGui import QPixmap
from PySide.QtCore import QVariant, QSize, QString
class QPixmapTest(UsesQApplication):
def testQVariantConstructor(self):
pixmap = QPixmap()
v = QVariant(pixmap)
pixmap_copy = QPixmap(v)
def testQSizeConstructor(self):
pixmap = QPixmap(QSize(10,20))
self.assert_(pixmap.size().height(), 20)
def testQStringConstructor(self):
pixmap = QPixmap(QString("Testing!"))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from helper import UsesQApplication
from PySide.QtGui import QPixmap
from PySide.QtCore import QVariant
#Only test if is possible create a QPixmap from a QVariant
class QPixmapTest(UsesQApplication):
def testQVariantConstructor(self):
pixmap = QPixmap()
v = QVariant(pixmap)
pixmap_copy = QPixmap(v)
if __name__ == '__main__':
unittest.main()
<commit_msg>Improve qpixmap test to support qstring and qsize arguments.
Reviewed by Marcelo Lira <marcelo.lira@openbossa.org><commit_after>
|
import unittest
from helper import UsesQApplication
from PySide.QtGui import QPixmap
from PySide.QtCore import QVariant, QSize, QString
class QPixmapTest(UsesQApplication):
def testQVariantConstructor(self):
pixmap = QPixmap()
v = QVariant(pixmap)
pixmap_copy = QPixmap(v)
def testQSizeConstructor(self):
pixmap = QPixmap(QSize(10,20))
self.assert_(pixmap.size().height(), 20)
def testQStringConstructor(self):
pixmap = QPixmap(QString("Testing!"))
if __name__ == '__main__':
unittest.main()
|
import unittest
from helper import UsesQApplication
from PySide.QtGui import QPixmap
from PySide.QtCore import QVariant
#Only test if is possible create a QPixmap from a QVariant
class QPixmapTest(UsesQApplication):
def testQVariantConstructor(self):
pixmap = QPixmap()
v = QVariant(pixmap)
pixmap_copy = QPixmap(v)
if __name__ == '__main__':
unittest.main()
Improve qpixmap test to support qstring and qsize arguments.
Reviewed by Marcelo Lira <marcelo.lira@openbossa.org>import unittest
from helper import UsesQApplication
from PySide.QtGui import QPixmap
from PySide.QtCore import QVariant, QSize, QString
class QPixmapTest(UsesQApplication):
def testQVariantConstructor(self):
pixmap = QPixmap()
v = QVariant(pixmap)
pixmap_copy = QPixmap(v)
def testQSizeConstructor(self):
pixmap = QPixmap(QSize(10,20))
self.assert_(pixmap.size().height(), 20)
def testQStringConstructor(self):
pixmap = QPixmap(QString("Testing!"))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from helper import UsesQApplication
from PySide.QtGui import QPixmap
from PySide.QtCore import QVariant
#Only test if is possible create a QPixmap from a QVariant
class QPixmapTest(UsesQApplication):
def testQVariantConstructor(self):
pixmap = QPixmap()
v = QVariant(pixmap)
pixmap_copy = QPixmap(v)
if __name__ == '__main__':
unittest.main()
<commit_msg>Improve qpixmap test to support qstring and qsize arguments.
Reviewed by Marcelo Lira <marcelo.lira@openbossa.org><commit_after>import unittest
from helper import UsesQApplication
from PySide.QtGui import QPixmap
from PySide.QtCore import QVariant, QSize, QString
class QPixmapTest(UsesQApplication):
def testQVariantConstructor(self):
pixmap = QPixmap()
v = QVariant(pixmap)
pixmap_copy = QPixmap(v)
def testQSizeConstructor(self):
pixmap = QPixmap(QSize(10,20))
self.assert_(pixmap.size().height(), 20)
def testQStringConstructor(self):
pixmap = QPixmap(QString("Testing!"))
if __name__ == '__main__':
unittest.main()
|
5315638ddba8ca38ea57aea7650a6a233d0f5872
|
icekit/plugins/image/admin.py
|
icekit/plugins/image/admin.py
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'title', 'alt_text',]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', ]
change_form_template = 'image/admin/change_form.html'
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'title', 'alt_text',]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', 'image']
change_form_template = 'image/admin/change_form.html'
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
Add filename to image search
|
Add filename to image search
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'title', 'alt_text',]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', ]
change_form_template = 'image/admin/change_form.html'
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
Add filename to image search
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'title', 'alt_text',]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', 'image']
change_form_template = 'image/admin/change_form.html'
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
<commit_before>from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'title', 'alt_text',]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', ]
change_form_template = 'image/admin/change_form.html'
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
<commit_msg>Add filename to image search<commit_after>
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'title', 'alt_text',]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', 'image']
change_form_template = 'image/admin/change_form.html'
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'title', 'alt_text',]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', ]
change_form_template = 'image/admin/change_form.html'
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
Add filename to image searchfrom django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'title', 'alt_text',]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', 'image']
change_form_template = 'image/admin/change_form.html'
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
<commit_before>from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'title', 'alt_text',]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', ]
change_form_template = 'image/admin/change_form.html'
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
<commit_msg>Add filename to image search<commit_after>from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'title', 'alt_text',]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', 'image']
change_form_template = 'image/admin/change_form.html'
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
14eba692295bc5391e35a6b1f32a40ad0d6b30d9
|
wa/__init__.py
|
wa/__init__.py
|
from wa.framework import pluginloader, signal
from wa.framework.command import Command, ComplexCommand, SubCommand
from wa.framework.configuration import settings
from wa.framework.configuration.core import Status
from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
from wa.framework.exception import (ResultProcessorError, ResourceError,
CommandError, ToolError)
from wa.framework.exception import (WAError, NotFoundError, ValidationError,
WorkloadError)
from wa.framework.exception import WorkerThreadError, PluginLoaderError
from wa.framework.instrumentation import (Instrument, very_slow, slow, normal, fast,
very_fast)
from wa.framework.plugin import Plugin, Parameter
from wa.framework.processor import ResultProcessor
from wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,
Executable)
from wa.framework.workload import Workload, ApkUiautoWorkload, ReventWorkload
|
from wa.framework import pluginloader, signal
from wa.framework.command import Command, ComplexCommand, SubCommand
from wa.framework.configuration import settings
from wa.framework.configuration.core import Status
from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
from wa.framework.exception import (ResultProcessorError, ResourceError,
CommandError, ToolError)
from wa.framework.exception import (WAError, NotFoundError, ValidationError,
WorkloadError)
from wa.framework.exception import WorkerThreadError, PluginLoaderError
from wa.framework.instrumentation import (Instrument, very_slow, slow, normal, fast,
very_fast)
from wa.framework.plugin import Plugin, Parameter
from wa.framework.processor import ResultProcessor
from wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,
Executable)
from wa.framework.workload import Workload, ApkWorkload, ApkUiautoWorkload, ReventWorkload
|
Add ApkWorkload to default imports
|
wa: Add ApkWorkload to default imports
|
Python
|
apache-2.0
|
setrofim/workload-automation,ARM-software/workload-automation,setrofim/workload-automation,lisatn/workload-automation,ARM-software/workload-automation,setrofim/workload-automation,setrofim/workload-automation,lisatn/workload-automation,ARM-software/workload-automation,lisatn/workload-automation,lisatn/workload-automation,ARM-software/workload-automation
|
from wa.framework import pluginloader, signal
from wa.framework.command import Command, ComplexCommand, SubCommand
from wa.framework.configuration import settings
from wa.framework.configuration.core import Status
from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
from wa.framework.exception import (ResultProcessorError, ResourceError,
CommandError, ToolError)
from wa.framework.exception import (WAError, NotFoundError, ValidationError,
WorkloadError)
from wa.framework.exception import WorkerThreadError, PluginLoaderError
from wa.framework.instrumentation import (Instrument, very_slow, slow, normal, fast,
very_fast)
from wa.framework.plugin import Plugin, Parameter
from wa.framework.processor import ResultProcessor
from wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,
Executable)
from wa.framework.workload import Workload, ApkUiautoWorkload, ReventWorkload
wa: Add ApkWorkload to default imports
|
from wa.framework import pluginloader, signal
from wa.framework.command import Command, ComplexCommand, SubCommand
from wa.framework.configuration import settings
from wa.framework.configuration.core import Status
from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
from wa.framework.exception import (ResultProcessorError, ResourceError,
CommandError, ToolError)
from wa.framework.exception import (WAError, NotFoundError, ValidationError,
WorkloadError)
from wa.framework.exception import WorkerThreadError, PluginLoaderError
from wa.framework.instrumentation import (Instrument, very_slow, slow, normal, fast,
very_fast)
from wa.framework.plugin import Plugin, Parameter
from wa.framework.processor import ResultProcessor
from wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,
Executable)
from wa.framework.workload import Workload, ApkWorkload, ApkUiautoWorkload, ReventWorkload
|
<commit_before>from wa.framework import pluginloader, signal
from wa.framework.command import Command, ComplexCommand, SubCommand
from wa.framework.configuration import settings
from wa.framework.configuration.core import Status
from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
from wa.framework.exception import (ResultProcessorError, ResourceError,
CommandError, ToolError)
from wa.framework.exception import (WAError, NotFoundError, ValidationError,
WorkloadError)
from wa.framework.exception import WorkerThreadError, PluginLoaderError
from wa.framework.instrumentation import (Instrument, very_slow, slow, normal, fast,
very_fast)
from wa.framework.plugin import Plugin, Parameter
from wa.framework.processor import ResultProcessor
from wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,
Executable)
from wa.framework.workload import Workload, ApkUiautoWorkload, ReventWorkload
<commit_msg>wa: Add ApkWorkload to default imports<commit_after>
|
from wa.framework import pluginloader, signal
from wa.framework.command import Command, ComplexCommand, SubCommand
from wa.framework.configuration import settings
from wa.framework.configuration.core import Status
from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
from wa.framework.exception import (ResultProcessorError, ResourceError,
CommandError, ToolError)
from wa.framework.exception import (WAError, NotFoundError, ValidationError,
WorkloadError)
from wa.framework.exception import WorkerThreadError, PluginLoaderError
from wa.framework.instrumentation import (Instrument, very_slow, slow, normal, fast,
very_fast)
from wa.framework.plugin import Plugin, Parameter
from wa.framework.processor import ResultProcessor
from wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,
Executable)
from wa.framework.workload import Workload, ApkWorkload, ApkUiautoWorkload, ReventWorkload
|
from wa.framework import pluginloader, signal
from wa.framework.command import Command, ComplexCommand, SubCommand
from wa.framework.configuration import settings
from wa.framework.configuration.core import Status
from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
from wa.framework.exception import (ResultProcessorError, ResourceError,
CommandError, ToolError)
from wa.framework.exception import (WAError, NotFoundError, ValidationError,
WorkloadError)
from wa.framework.exception import WorkerThreadError, PluginLoaderError
from wa.framework.instrumentation import (Instrument, very_slow, slow, normal, fast,
very_fast)
from wa.framework.plugin import Plugin, Parameter
from wa.framework.processor import ResultProcessor
from wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,
Executable)
from wa.framework.workload import Workload, ApkUiautoWorkload, ReventWorkload
wa: Add ApkWorkload to default importsfrom wa.framework import pluginloader, signal
from wa.framework.command import Command, ComplexCommand, SubCommand
from wa.framework.configuration import settings
from wa.framework.configuration.core import Status
from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
from wa.framework.exception import (ResultProcessorError, ResourceError,
CommandError, ToolError)
from wa.framework.exception import (WAError, NotFoundError, ValidationError,
WorkloadError)
from wa.framework.exception import WorkerThreadError, PluginLoaderError
from wa.framework.instrumentation import (Instrument, very_slow, slow, normal, fast,
very_fast)
from wa.framework.plugin import Plugin, Parameter
from wa.framework.processor import ResultProcessor
from wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,
Executable)
from wa.framework.workload import Workload, ApkWorkload, ApkUiautoWorkload, ReventWorkload
|
<commit_before>from wa.framework import pluginloader, signal
from wa.framework.command import Command, ComplexCommand, SubCommand
from wa.framework.configuration import settings
from wa.framework.configuration.core import Status
from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
from wa.framework.exception import (ResultProcessorError, ResourceError,
CommandError, ToolError)
from wa.framework.exception import (WAError, NotFoundError, ValidationError,
WorkloadError)
from wa.framework.exception import WorkerThreadError, PluginLoaderError
from wa.framework.instrumentation import (Instrument, very_slow, slow, normal, fast,
very_fast)
from wa.framework.plugin import Plugin, Parameter
from wa.framework.processor import ResultProcessor
from wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,
Executable)
from wa.framework.workload import Workload, ApkUiautoWorkload, ReventWorkload
<commit_msg>wa: Add ApkWorkload to default imports<commit_after>from wa.framework import pluginloader, signal
from wa.framework.command import Command, ComplexCommand, SubCommand
from wa.framework.configuration import settings
from wa.framework.configuration.core import Status
from wa.framework.exception import HostError, JobError, InstrumentError, ConfigError
from wa.framework.exception import (ResultProcessorError, ResourceError,
CommandError, ToolError)
from wa.framework.exception import (WAError, NotFoundError, ValidationError,
WorkloadError)
from wa.framework.exception import WorkerThreadError, PluginLoaderError
from wa.framework.instrumentation import (Instrument, very_slow, slow, normal, fast,
very_fast)
from wa.framework.plugin import Plugin, Parameter
from wa.framework.processor import ResultProcessor
from wa.framework.resource import (NO_ONE, JarFile, ApkFile, ReventFile, File,
Executable)
from wa.framework.workload import Workload, ApkWorkload, ApkUiautoWorkload, ReventWorkload
|
701756f48fc627ed1f9cef77e6ad887c2e95e0f8
|
allauth/socialaccount/providers/discord/views.py
|
allauth/socialaccount/providers/discord/views.py
|
import requests
from allauth.socialaccount.providers.discord.provider import DiscordProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
access_token_url = 'https://discordapp.com/api/oauth2/token'
authorize_url = 'https://discordapp.com/api/oauth2/authorize'
profile_url = 'https://discordapp.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
'Authorization': 'Bearer {0}'.format(token.token),
'Content-Type': 'application/json',
}
extra_data = requests.get(self.profile_url, headers=headers)
return self.get_provider().sociallogin_from_response(
request,
extra_data.json()
)
oauth2_login = OAuth2LoginView.adapter_view(DiscordOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DiscordOAuth2Adapter)
|
import requests
from allauth.socialaccount.providers.discord.provider import DiscordProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
access_token_url = 'https://discord.com/api/oauth2/token'
authorize_url = 'https://discord.com/api/oauth2/authorize'
profile_url = 'https://discord.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
'Authorization': 'Bearer {0}'.format(token.token),
'Content-Type': 'application/json',
}
extra_data = requests.get(self.profile_url, headers=headers)
return self.get_provider().sociallogin_from_response(
request,
extra_data.json()
)
oauth2_login = OAuth2LoginView.adapter_view(DiscordOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DiscordOAuth2Adapter)
|
Switch to new API domain
|
fix(discord): Switch to new API domain
|
Python
|
mit
|
pennersr/django-allauth,pennersr/django-allauth,pennersr/django-allauth,rsalmaso/django-allauth,rsalmaso/django-allauth,rsalmaso/django-allauth
|
import requests
from allauth.socialaccount.providers.discord.provider import DiscordProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
access_token_url = 'https://discordapp.com/api/oauth2/token'
authorize_url = 'https://discordapp.com/api/oauth2/authorize'
profile_url = 'https://discordapp.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
'Authorization': 'Bearer {0}'.format(token.token),
'Content-Type': 'application/json',
}
extra_data = requests.get(self.profile_url, headers=headers)
return self.get_provider().sociallogin_from_response(
request,
extra_data.json()
)
oauth2_login = OAuth2LoginView.adapter_view(DiscordOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DiscordOAuth2Adapter)
fix(discord): Switch to new API domain
|
import requests
from allauth.socialaccount.providers.discord.provider import DiscordProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
access_token_url = 'https://discord.com/api/oauth2/token'
authorize_url = 'https://discord.com/api/oauth2/authorize'
profile_url = 'https://discord.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
'Authorization': 'Bearer {0}'.format(token.token),
'Content-Type': 'application/json',
}
extra_data = requests.get(self.profile_url, headers=headers)
return self.get_provider().sociallogin_from_response(
request,
extra_data.json()
)
oauth2_login = OAuth2LoginView.adapter_view(DiscordOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DiscordOAuth2Adapter)
|
<commit_before>import requests
from allauth.socialaccount.providers.discord.provider import DiscordProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
access_token_url = 'https://discordapp.com/api/oauth2/token'
authorize_url = 'https://discordapp.com/api/oauth2/authorize'
profile_url = 'https://discordapp.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
'Authorization': 'Bearer {0}'.format(token.token),
'Content-Type': 'application/json',
}
extra_data = requests.get(self.profile_url, headers=headers)
return self.get_provider().sociallogin_from_response(
request,
extra_data.json()
)
oauth2_login = OAuth2LoginView.adapter_view(DiscordOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DiscordOAuth2Adapter)
<commit_msg>fix(discord): Switch to new API domain<commit_after>
|
import requests
from allauth.socialaccount.providers.discord.provider import DiscordProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
access_token_url = 'https://discord.com/api/oauth2/token'
authorize_url = 'https://discord.com/api/oauth2/authorize'
profile_url = 'https://discord.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
'Authorization': 'Bearer {0}'.format(token.token),
'Content-Type': 'application/json',
}
extra_data = requests.get(self.profile_url, headers=headers)
return self.get_provider().sociallogin_from_response(
request,
extra_data.json()
)
oauth2_login = OAuth2LoginView.adapter_view(DiscordOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DiscordOAuth2Adapter)
|
import requests
from allauth.socialaccount.providers.discord.provider import DiscordProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
access_token_url = 'https://discordapp.com/api/oauth2/token'
authorize_url = 'https://discordapp.com/api/oauth2/authorize'
profile_url = 'https://discordapp.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
'Authorization': 'Bearer {0}'.format(token.token),
'Content-Type': 'application/json',
}
extra_data = requests.get(self.profile_url, headers=headers)
return self.get_provider().sociallogin_from_response(
request,
extra_data.json()
)
oauth2_login = OAuth2LoginView.adapter_view(DiscordOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DiscordOAuth2Adapter)
fix(discord): Switch to new API domainimport requests
from allauth.socialaccount.providers.discord.provider import DiscordProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
access_token_url = 'https://discord.com/api/oauth2/token'
authorize_url = 'https://discord.com/api/oauth2/authorize'
profile_url = 'https://discord.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
'Authorization': 'Bearer {0}'.format(token.token),
'Content-Type': 'application/json',
}
extra_data = requests.get(self.profile_url, headers=headers)
return self.get_provider().sociallogin_from_response(
request,
extra_data.json()
)
oauth2_login = OAuth2LoginView.adapter_view(DiscordOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DiscordOAuth2Adapter)
|
<commit_before>import requests
from allauth.socialaccount.providers.discord.provider import DiscordProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
access_token_url = 'https://discordapp.com/api/oauth2/token'
authorize_url = 'https://discordapp.com/api/oauth2/authorize'
profile_url = 'https://discordapp.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
'Authorization': 'Bearer {0}'.format(token.token),
'Content-Type': 'application/json',
}
extra_data = requests.get(self.profile_url, headers=headers)
return self.get_provider().sociallogin_from_response(
request,
extra_data.json()
)
oauth2_login = OAuth2LoginView.adapter_view(DiscordOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DiscordOAuth2Adapter)
<commit_msg>fix(discord): Switch to new API domain<commit_after>import requests
from allauth.socialaccount.providers.discord.provider import DiscordProvider
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter,
OAuth2CallbackView,
OAuth2LoginView,
)
class DiscordOAuth2Adapter(OAuth2Adapter):
provider_id = DiscordProvider.id
access_token_url = 'https://discord.com/api/oauth2/token'
authorize_url = 'https://discord.com/api/oauth2/authorize'
profile_url = 'https://discord.com/api/users/@me'
def complete_login(self, request, app, token, **kwargs):
headers = {
'Authorization': 'Bearer {0}'.format(token.token),
'Content-Type': 'application/json',
}
extra_data = requests.get(self.profile_url, headers=headers)
return self.get_provider().sociallogin_from_response(
request,
extra_data.json()
)
oauth2_login = OAuth2LoginView.adapter_view(DiscordOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(DiscordOAuth2Adapter)
|
a2736b4c4c4d6d004a7d055e7e9f0436a7be5b3d
|
gaphor/UML/__init__.py
|
gaphor/UML/__init__.py
|
from gaphor.UML.collection import collection
from gaphor.UML.uml2 import *
from gaphor.UML.elementfactory import ElementFactory
from gaphor.UML import modelfactory as model
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
__all__ = ['collection', 'context', 'diagram', 'element', 'elementfactory', 'event', 'interface', 'modelfactory',
'properties', 'uml2', 'umlfmt', 'umllex']
|
from gaphor.UML import modelfactory as model
from gaphor.UML.collection import collection
from gaphor.UML.elementfactory import ElementFactory
from gaphor.UML.uml2 import *
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
|
Fix * imports for building UML with Python3
|
Fix * imports for building UML with Python3
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
from gaphor.UML.collection import collection
from gaphor.UML.uml2 import *
from gaphor.UML.elementfactory import ElementFactory
from gaphor.UML import modelfactory as model
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
__all__ = ['collection', 'context', 'diagram', 'element', 'elementfactory', 'event', 'interface', 'modelfactory',
'properties', 'uml2', 'umlfmt', 'umllex']
Fix * imports for building UML with Python3
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
|
from gaphor.UML import modelfactory as model
from gaphor.UML.collection import collection
from gaphor.UML.elementfactory import ElementFactory
from gaphor.UML.uml2 import *
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
|
<commit_before>from gaphor.UML.collection import collection
from gaphor.UML.uml2 import *
from gaphor.UML.elementfactory import ElementFactory
from gaphor.UML import modelfactory as model
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
__all__ = ['collection', 'context', 'diagram', 'element', 'elementfactory', 'event', 'interface', 'modelfactory',
'properties', 'uml2', 'umlfmt', 'umllex']
<commit_msg>Fix * imports for building UML with Python3
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me><commit_after>
|
from gaphor.UML import modelfactory as model
from gaphor.UML.collection import collection
from gaphor.UML.elementfactory import ElementFactory
from gaphor.UML.uml2 import *
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
|
from gaphor.UML.collection import collection
from gaphor.UML.uml2 import *
from gaphor.UML.elementfactory import ElementFactory
from gaphor.UML import modelfactory as model
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
__all__ = ['collection', 'context', 'diagram', 'element', 'elementfactory', 'event', 'interface', 'modelfactory',
'properties', 'uml2', 'umlfmt', 'umllex']
Fix * imports for building UML with Python3
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>from gaphor.UML import modelfactory as model
from gaphor.UML.collection import collection
from gaphor.UML.elementfactory import ElementFactory
from gaphor.UML.uml2 import *
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
|
<commit_before>from gaphor.UML.collection import collection
from gaphor.UML.uml2 import *
from gaphor.UML.elementfactory import ElementFactory
from gaphor.UML import modelfactory as model
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
__all__ = ['collection', 'context', 'diagram', 'element', 'elementfactory', 'event', 'interface', 'modelfactory',
'properties', 'uml2', 'umlfmt', 'umllex']
<commit_msg>Fix * imports for building UML with Python3
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me><commit_after>from gaphor.UML import modelfactory as model
from gaphor.UML.collection import collection
from gaphor.UML.elementfactory import ElementFactory
from gaphor.UML.uml2 import *
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
|
e60563e28ce08a850809aef696a348c84359ece2
|
gore/tests/test_api.py
|
gore/tests/test_api.py
|
import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
assert len(list_resp) == len(events)
assert list_resp[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
|
import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
event_list = list_resp['events']
assert len(event_list) == len(events)
assert event_list[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
|
Add search to events API
|
Add search to events API
|
Python
|
mit
|
akx/gentry,akx/gentry,akx/gentry,akx/gentry
|
import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
assert len(list_resp) == len(events)
assert list_resp[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
Add search to events API
|
import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
event_list = list_resp['events']
assert len(event_list) == len(events)
assert event_list[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
|
<commit_before>import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
assert len(list_resp) == len(events)
assert list_resp[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
<commit_msg>Add search to events API<commit_after>
|
import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
event_list = list_resp['events']
assert len(event_list) == len(events)
assert event_list[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
|
import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
assert len(list_resp) == len(events)
assert list_resp[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
Add search to events APIimport json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
event_list = list_resp['events']
assert len(event_list) == len(events)
assert event_list[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
|
<commit_before>import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
assert len(list_resp) == len(events)
assert list_resp[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
<commit_msg>Add search to events API<commit_after>import json
import pytest
from django.utils.encoding import force_text
from gore.models import Event
from gore.tests.data import exc_payload
@pytest.mark.django_db
def test_events_api(project, admin_client):
events = [
Event.objects.create_from_raven(project_id=project.id, body=json.loads(exc_payload))
for i
in range(10)
]
list_resp = json.loads(force_text(admin_client.get('/api/events/').content))
event_list = list_resp['events']
assert len(event_list) == len(events)
assert event_list[0]['id'] == events[-1].id
for event in events:
detail_resp = json.loads(force_text(admin_client.get('/api/event/{id}/'.format(id=event.id)).content))
assert detail_resp['id'] == event.id
def test_events_api_auth(client):
assert client.get('/api/events/').status_code >= 400
|
61fe85842a0c932c4a8375f657eb06f406344ace
|
bumblebee/modules/caffeine.py
|
bumblebee/modules/caffeine.py
|
# pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset +dpms")
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset -dpms")
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
#pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xdg-screensaver
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text="")
)
self._active = False
self.interval(1)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def state(self, widget):
if self._active:
return "activated"
return "deactivated"
def _toggle(self, event):
self._active = not self._active
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
else:
bumblebee.util.execute("notify-send \"Out of coffee\"")
def update(self, widgets):
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Use xdg-screensaver instead of xset
|
Use xdg-screensaver instead of xset
|
Python
|
mit
|
tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status
|
# pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset +dpms")
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset -dpms")
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
Use xdg-screensaver instead of xset
|
#pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xdg-screensaver
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text="")
)
self._active = False
self.interval(1)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def state(self, widget):
if self._active:
return "activated"
return "deactivated"
def _toggle(self, event):
self._active = not self._active
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
else:
bumblebee.util.execute("notify-send \"Out of coffee\"")
def update(self, widgets):
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
<commit_before># pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset +dpms")
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset -dpms")
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
<commit_msg>Use xdg-screensaver instead of xset<commit_after>
|
#pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xdg-screensaver
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text="")
)
self._active = False
self.interval(1)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def state(self, widget):
if self._active:
return "activated"
return "deactivated"
def _toggle(self, event):
self._active = not self._active
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
else:
bumblebee.util.execute("notify-send \"Out of coffee\"")
def update(self, widgets):
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
# pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset +dpms")
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset -dpms")
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
Use xdg-screensaver instead of xset#pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xdg-screensaver
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text="")
)
self._active = False
self.interval(1)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def state(self, widget):
if self._active:
return "activated"
return "deactivated"
def _toggle(self, event):
self._active = not self._active
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
else:
bumblebee.util.execute("notify-send \"Out of coffee\"")
def update(self, widgets):
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
<commit_before># pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset +dpms")
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset -dpms")
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
<commit_msg>Use xdg-screensaver instead of xset<commit_after>#pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xdg-screensaver
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text="")
)
self._active = False
self.interval(1)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def state(self, widget):
if self._active:
return "activated"
return "deactivated"
def _toggle(self, event):
self._active = not self._active
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
else:
bumblebee.util.execute("notify-send \"Out of coffee\"")
def update(self, widgets):
if self._active:
bumblebee.util.execute("xdg-screensaver reset")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
ea67ec01a1f91b44dd34d8c58921f8c29a4c054a
|
aragog/routing/client_error.py
|
aragog/routing/client_error.py
|
"""
Client Error HTTP Status Callables
"""
class HTTP404(object):
"""
HTTP 404 Response
"""
def __call__(self, environ, start_response):
start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
return ['']
|
"""
Client Error HTTP Status Callables
"""
def HTTP404(environ, start_response):
"""
HTTP 404 Response
"""
start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
return ['']
|
Convert HTTP404 to a function.
|
Convert HTTP404 to a function.
|
Python
|
apache-2.0
|
bramwelt/aragog
|
"""
Client Error HTTP Status Callables
"""
class HTTP404(object):
"""
HTTP 404 Response
"""
def __call__(self, environ, start_response):
start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
return ['']
Convert HTTP404 to a function.
|
"""
Client Error HTTP Status Callables
"""
def HTTP404(environ, start_response):
"""
HTTP 404 Response
"""
start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
return ['']
|
<commit_before>"""
Client Error HTTP Status Callables
"""
class HTTP404(object):
"""
HTTP 404 Response
"""
def __call__(self, environ, start_response):
start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
return ['']
<commit_msg>Convert HTTP404 to a function.<commit_after>
|
"""
Client Error HTTP Status Callables
"""
def HTTP404(environ, start_response):
"""
HTTP 404 Response
"""
start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
return ['']
|
"""
Client Error HTTP Status Callables
"""
class HTTP404(object):
"""
HTTP 404 Response
"""
def __call__(self, environ, start_response):
start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
return ['']
Convert HTTP404 to a function."""
Client Error HTTP Status Callables
"""
def HTTP404(environ, start_response):
"""
HTTP 404 Response
"""
start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
return ['']
|
<commit_before>"""
Client Error HTTP Status Callables
"""
class HTTP404(object):
"""
HTTP 404 Response
"""
def __call__(self, environ, start_response):
start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
return ['']
<commit_msg>Convert HTTP404 to a function.<commit_after>"""
Client Error HTTP Status Callables
"""
def HTTP404(environ, start_response):
"""
HTTP 404 Response
"""
start_response('404 NOT FOUND', [('Content-Type', 'text/plain')])
return ['']
|
c244b84def159bc4d4e281fe39ebe06886a109d2
|
tests/__init__.py
|
tests/__init__.py
|
from django.conf import settings
from mock import Mock, patch
from unittest2 import TestCase
settings.configure(
DEFAULT_INDEX_TABLESPACE='',
)
class TestPreference(object):
def __init__(self, name, value, user=None):
self.name = name
self.value = value
self.user = user
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return '<{name}:{value}:{user}>'.format(**self.__dict__)
class TestUser(object):
@property
def preferences(self):
return Mock(all=Mock(return_value=self._preferences))
@preferences.setter
def preferences(self, value):
self._preferences = [
TestPreference(k, v) for k, v in value.iteritems()]
class SerializerTestCase(TestCase):
def patch_from_native(self):
patcher = patch(
'madprops.serializers.ModelSerializer.from_native',
new=lambda self, data, files: TestPreference(
data['name'], data['value'], data.get('user'))
)
self.patched_from_native = patcher.start()
self.addCleanup(patcher.stop)
|
from django.conf import settings
from mock import Mock, patch
from unittest2 import TestCase
settings.configure()
class TestPreference(object):
def __init__(self, name, value, user=None):
self.name = name
self.value = value
self.user = user
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return '<{name}:{value}:{user}>'.format(**self.__dict__)
class TestUser(object):
@property
def preferences(self):
return Mock(all=Mock(return_value=self._preferences))
@preferences.setter
def preferences(self, value):
self._preferences = [
TestPreference(k, v) for k, v in value.iteritems()]
class SerializerTestCase(TestCase):
def patch_from_native(self):
patcher = patch(
'madprops.serializers.ModelSerializer.from_native',
new=lambda self, data, files: TestPreference(
data['name'], data['value'], data.get('user'))
)
self.patched_from_native = patcher.start()
self.addCleanup(patcher.stop)
|
Simplify configure of django settings
|
Simplify configure of django settings
|
Python
|
mit
|
yola/drf-madprops
|
from django.conf import settings
from mock import Mock, patch
from unittest2 import TestCase
settings.configure(
DEFAULT_INDEX_TABLESPACE='',
)
class TestPreference(object):
def __init__(self, name, value, user=None):
self.name = name
self.value = value
self.user = user
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return '<{name}:{value}:{user}>'.format(**self.__dict__)
class TestUser(object):
@property
def preferences(self):
return Mock(all=Mock(return_value=self._preferences))
@preferences.setter
def preferences(self, value):
self._preferences = [
TestPreference(k, v) for k, v in value.iteritems()]
class SerializerTestCase(TestCase):
def patch_from_native(self):
patcher = patch(
'madprops.serializers.ModelSerializer.from_native',
new=lambda self, data, files: TestPreference(
data['name'], data['value'], data.get('user'))
)
self.patched_from_native = patcher.start()
self.addCleanup(patcher.stop)
Simplify configure of django settings
|
from django.conf import settings
from mock import Mock, patch
from unittest2 import TestCase
settings.configure()
class TestPreference(object):
def __init__(self, name, value, user=None):
self.name = name
self.value = value
self.user = user
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return '<{name}:{value}:{user}>'.format(**self.__dict__)
class TestUser(object):
@property
def preferences(self):
return Mock(all=Mock(return_value=self._preferences))
@preferences.setter
def preferences(self, value):
self._preferences = [
TestPreference(k, v) for k, v in value.iteritems()]
class SerializerTestCase(TestCase):
def patch_from_native(self):
patcher = patch(
'madprops.serializers.ModelSerializer.from_native',
new=lambda self, data, files: TestPreference(
data['name'], data['value'], data.get('user'))
)
self.patched_from_native = patcher.start()
self.addCleanup(patcher.stop)
|
<commit_before>from django.conf import settings
from mock import Mock, patch
from unittest2 import TestCase
settings.configure(
DEFAULT_INDEX_TABLESPACE='',
)
class TestPreference(object):
def __init__(self, name, value, user=None):
self.name = name
self.value = value
self.user = user
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return '<{name}:{value}:{user}>'.format(**self.__dict__)
class TestUser(object):
@property
def preferences(self):
return Mock(all=Mock(return_value=self._preferences))
@preferences.setter
def preferences(self, value):
self._preferences = [
TestPreference(k, v) for k, v in value.iteritems()]
class SerializerTestCase(TestCase):
def patch_from_native(self):
patcher = patch(
'madprops.serializers.ModelSerializer.from_native',
new=lambda self, data, files: TestPreference(
data['name'], data['value'], data.get('user'))
)
self.patched_from_native = patcher.start()
self.addCleanup(patcher.stop)
<commit_msg>Simplify configure of django settings<commit_after>
|
from django.conf import settings
from mock import Mock, patch
from unittest2 import TestCase
settings.configure()
class TestPreference(object):
def __init__(self, name, value, user=None):
self.name = name
self.value = value
self.user = user
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return '<{name}:{value}:{user}>'.format(**self.__dict__)
class TestUser(object):
@property
def preferences(self):
return Mock(all=Mock(return_value=self._preferences))
@preferences.setter
def preferences(self, value):
self._preferences = [
TestPreference(k, v) for k, v in value.iteritems()]
class SerializerTestCase(TestCase):
def patch_from_native(self):
patcher = patch(
'madprops.serializers.ModelSerializer.from_native',
new=lambda self, data, files: TestPreference(
data['name'], data['value'], data.get('user'))
)
self.patched_from_native = patcher.start()
self.addCleanup(patcher.stop)
|
from django.conf import settings
from mock import Mock, patch
from unittest2 import TestCase
settings.configure(
DEFAULT_INDEX_TABLESPACE='',
)
class TestPreference(object):
def __init__(self, name, value, user=None):
self.name = name
self.value = value
self.user = user
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return '<{name}:{value}:{user}>'.format(**self.__dict__)
class TestUser(object):
@property
def preferences(self):
return Mock(all=Mock(return_value=self._preferences))
@preferences.setter
def preferences(self, value):
self._preferences = [
TestPreference(k, v) for k, v in value.iteritems()]
class SerializerTestCase(TestCase):
def patch_from_native(self):
patcher = patch(
'madprops.serializers.ModelSerializer.from_native',
new=lambda self, data, files: TestPreference(
data['name'], data['value'], data.get('user'))
)
self.patched_from_native = patcher.start()
self.addCleanup(patcher.stop)
Simplify configure of django settingsfrom django.conf import settings
from mock import Mock, patch
from unittest2 import TestCase
settings.configure()
class TestPreference(object):
def __init__(self, name, value, user=None):
self.name = name
self.value = value
self.user = user
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return '<{name}:{value}:{user}>'.format(**self.__dict__)
class TestUser(object):
@property
def preferences(self):
return Mock(all=Mock(return_value=self._preferences))
@preferences.setter
def preferences(self, value):
self._preferences = [
TestPreference(k, v) for k, v in value.iteritems()]
class SerializerTestCase(TestCase):
def patch_from_native(self):
patcher = patch(
'madprops.serializers.ModelSerializer.from_native',
new=lambda self, data, files: TestPreference(
data['name'], data['value'], data.get('user'))
)
self.patched_from_native = patcher.start()
self.addCleanup(patcher.stop)
|
<commit_before>from django.conf import settings
from mock import Mock, patch
from unittest2 import TestCase
settings.configure(
DEFAULT_INDEX_TABLESPACE='',
)
class TestPreference(object):
def __init__(self, name, value, user=None):
self.name = name
self.value = value
self.user = user
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return '<{name}:{value}:{user}>'.format(**self.__dict__)
class TestUser(object):
@property
def preferences(self):
return Mock(all=Mock(return_value=self._preferences))
@preferences.setter
def preferences(self, value):
self._preferences = [
TestPreference(k, v) for k, v in value.iteritems()]
class SerializerTestCase(TestCase):
def patch_from_native(self):
patcher = patch(
'madprops.serializers.ModelSerializer.from_native',
new=lambda self, data, files: TestPreference(
data['name'], data['value'], data.get('user'))
)
self.patched_from_native = patcher.start()
self.addCleanup(patcher.stop)
<commit_msg>Simplify configure of django settings<commit_after>from django.conf import settings
from mock import Mock, patch
from unittest2 import TestCase
settings.configure()
class TestPreference(object):
def __init__(self, name, value, user=None):
self.name = name
self.value = value
self.user = user
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return '<{name}:{value}:{user}>'.format(**self.__dict__)
class TestUser(object):
@property
def preferences(self):
return Mock(all=Mock(return_value=self._preferences))
@preferences.setter
def preferences(self, value):
self._preferences = [
TestPreference(k, v) for k, v in value.iteritems()]
class SerializerTestCase(TestCase):
def patch_from_native(self):
patcher = patch(
'madprops.serializers.ModelSerializer.from_native',
new=lambda self, data, files: TestPreference(
data['name'], data['value'], data.get('user'))
)
self.patched_from_native = patcher.start()
self.addCleanup(patcher.stop)
|
cdb8dc9b027338b9facae047dd7c303944205a05
|
dask_mesos/__init__.py
|
dask_mesos/__init__.py
|
from __future__ import absolute_import, division, print_function
import logging
logging.basicConfig(level=logging.ERROR,
format='%(relativeCreated)6d %(threadName)s %(message)s')
from .satyr import get
from .delayed import mesos
|
from __future__ import absolute_import, division, print_function
import logging
logging.basicConfig(level=logging.INFO,
format='%(relativeCreated)6d %(threadName)s %(message)s')
from .satyr import get
from .delayed import mesos
|
Set default logging level to INFO
|
Set default logging level to INFO
|
Python
|
apache-2.0
|
lensacom/dask.mesos
|
from __future__ import absolute_import, division, print_function
import logging
logging.basicConfig(level=logging.ERROR,
format='%(relativeCreated)6d %(threadName)s %(message)s')
from .satyr import get
from .delayed import mesos
Set default logging level to INFO
|
from __future__ import absolute_import, division, print_function
import logging
logging.basicConfig(level=logging.INFO,
format='%(relativeCreated)6d %(threadName)s %(message)s')
from .satyr import get
from .delayed import mesos
|
<commit_before>from __future__ import absolute_import, division, print_function
import logging
logging.basicConfig(level=logging.ERROR,
format='%(relativeCreated)6d %(threadName)s %(message)s')
from .satyr import get
from .delayed import mesos
<commit_msg>Set default logging level to INFO<commit_after>
|
from __future__ import absolute_import, division, print_function
import logging
logging.basicConfig(level=logging.INFO,
format='%(relativeCreated)6d %(threadName)s %(message)s')
from .satyr import get
from .delayed import mesos
|
from __future__ import absolute_import, division, print_function
import logging
logging.basicConfig(level=logging.ERROR,
format='%(relativeCreated)6d %(threadName)s %(message)s')
from .satyr import get
from .delayed import mesos
Set default logging level to INFOfrom __future__ import absolute_import, division, print_function
import logging
logging.basicConfig(level=logging.INFO,
format='%(relativeCreated)6d %(threadName)s %(message)s')
from .satyr import get
from .delayed import mesos
|
<commit_before>from __future__ import absolute_import, division, print_function
import logging
logging.basicConfig(level=logging.ERROR,
format='%(relativeCreated)6d %(threadName)s %(message)s')
from .satyr import get
from .delayed import mesos
<commit_msg>Set default logging level to INFO<commit_after>from __future__ import absolute_import, division, print_function
import logging
logging.basicConfig(level=logging.INFO,
format='%(relativeCreated)6d %(threadName)s %(message)s')
from .satyr import get
from .delayed import mesos
|
0f54bb7a1a26bb3e7192b30cc426fbaeb92caaed
|
tests/utils/test_settings.py
|
tests/utils/test_settings.py
|
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setting_creation(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
|
from app import db, cache
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setitem(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
def test_getitem(self):
setting = Setting(name='foo', value='bar')
db.session.add(setting)
db.session.commit()
# We need to delete the Setting dictionary cache manually,
# since we didn't add the setting through the AppSettings interface
cache.delete_memoized(Setting.as_dict)
self.assertEqual(self.app.config['SETTINGS']['foo'], 'bar')
|
Add __getitem__ test for AppSettings
|
Add __getitem__ test for AppSettings
|
Python
|
mit
|
Encrylize/flask-blogger,Encrylize/flask-blogger,Encrylize/flask-blogger
|
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setting_creation(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
Add __getitem__ test for AppSettings
|
from app import db, cache
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setitem(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
def test_getitem(self):
setting = Setting(name='foo', value='bar')
db.session.add(setting)
db.session.commit()
# We need to delete the Setting dictionary cache manually,
# since we didn't add the setting through the AppSettings interface
cache.delete_memoized(Setting.as_dict)
self.assertEqual(self.app.config['SETTINGS']['foo'], 'bar')
|
<commit_before>from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setting_creation(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
<commit_msg>Add __getitem__ test for AppSettings<commit_after>
|
from app import db, cache
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setitem(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
def test_getitem(self):
setting = Setting(name='foo', value='bar')
db.session.add(setting)
db.session.commit()
# We need to delete the Setting dictionary cache manually,
# since we didn't add the setting through the AppSettings interface
cache.delete_memoized(Setting.as_dict)
self.assertEqual(self.app.config['SETTINGS']['foo'], 'bar')
|
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setting_creation(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
Add __getitem__ test for AppSettingsfrom app import db, cache
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setitem(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
def test_getitem(self):
setting = Setting(name='foo', value='bar')
db.session.add(setting)
db.session.commit()
# We need to delete the Setting dictionary cache manually,
# since we didn't add the setting through the AppSettings interface
cache.delete_memoized(Setting.as_dict)
self.assertEqual(self.app.config['SETTINGS']['foo'], 'bar')
|
<commit_before>from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setting_creation(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
<commit_msg>Add __getitem__ test for AppSettings<commit_after>from app import db, cache
from app.models import Setting
from tests.general import AppTestCase
class TestAppSettings(AppTestCase):
def test_setitem(self):
self.app.config['SETTINGS']['foo'] = 'bar'
setting = Setting.query.filter_by(name='foo').first()
self.assertEqual(setting.value, 'bar')
self.app.config['SETTINGS']['foo'] = 'foobar'
self.assertEqual(setting.value, 'foobar')
def test_getitem(self):
setting = Setting(name='foo', value='bar')
db.session.add(setting)
db.session.commit()
# We need to delete the Setting dictionary cache manually,
# since we didn't add the setting through the AppSettings interface
cache.delete_memoized(Setting.as_dict)
self.assertEqual(self.app.config['SETTINGS']['foo'], 'bar')
|
fbbfe256cd23f87e5aad1dc4858c5e7c7753352b
|
cmd2/__init__.py
|
cmd2/__init__.py
|
#
# -*- coding: utf-8 -*-
|
#
# -*- coding: utf-8 -*-
from .cmd2 import __version__, Cmd, AddSubmenu, CmdResult, categorize
from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category
|
Add default imports back in
|
Add default imports back in
|
Python
|
mit
|
python-cmd2/cmd2,python-cmd2/cmd2
|
#
# -*- coding: utf-8 -*-
Add default imports back in
|
#
# -*- coding: utf-8 -*-
from .cmd2 import __version__, Cmd, AddSubmenu, CmdResult, categorize
from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category
|
<commit_before>#
# -*- coding: utf-8 -*-
<commit_msg>Add default imports back in<commit_after>
|
#
# -*- coding: utf-8 -*-
from .cmd2 import __version__, Cmd, AddSubmenu, CmdResult, categorize
from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category
|
#
# -*- coding: utf-8 -*-
Add default imports back in#
# -*- coding: utf-8 -*-
from .cmd2 import __version__, Cmd, AddSubmenu, CmdResult, categorize
from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category
|
<commit_before>#
# -*- coding: utf-8 -*-
<commit_msg>Add default imports back in<commit_after>#
# -*- coding: utf-8 -*-
from .cmd2 import __version__, Cmd, AddSubmenu, CmdResult, categorize
from .cmd2 import with_argument_list, with_argparser, with_argparser_and_unknown_args, with_category
|
c7b69d7248c37e984aac1df64e44ae5037d3fad5
|
mysite/deployment_settings.py
|
mysite/deployment_settings.py
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=['all@openhatch.org']
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
|
Correct the way we set ADMINS
|
Correct the way we set ADMINS
|
Python
|
agpl-3.0
|
sudheesh001/oh-mainline,moijes12/oh-mainline,SnappleCap/oh-mainline,vipul-sharma20/oh-mainline,mzdaniel/oh-mainline,campbe13/openhatch,sudheesh001/oh-mainline,sudheesh001/oh-mainline,jledbetter/openhatch,vipul-sharma20/oh-mainline,SnappleCap/oh-mainline,SnappleCap/oh-mainline,sudheesh001/oh-mainline,ehashman/oh-mainline,jledbetter/openhatch,willingc/oh-mainline,campbe13/openhatch,onceuponatimeforever/oh-mainline,willingc/oh-mainline,mzdaniel/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,sudheesh001/oh-mainline,SnappleCap/oh-mainline,vipul-sharma20/oh-mainline,waseem18/oh-mainline,heeraj123/oh-mainline,Changaco/oh-mainline,willingc/oh-mainline,mzdaniel/oh-mainline,openhatch/oh-mainline,eeshangarg/oh-mainline,onceuponatimeforever/oh-mainline,moijes12/oh-mainline,openhatch/oh-mainline,waseem18/oh-mainline,mzdaniel/oh-mainline,onceuponatimeforever/oh-mainline,ojengwa/oh-mainline,ehashman/oh-mainline,onceuponatimeforever/oh-mainline,ojengwa/oh-mainline,heeraj123/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,eeshangarg/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch,ehashman/oh-mainline,moijes12/oh-mainline,mzdaniel/oh-mainline,nirmeshk/oh-mainline,vipul-sharma20/oh-mainline,jledbetter/openhatch,moijes12/oh-mainline,waseem18/oh-mainline,heeraj123/oh-mainline,waseem18/oh-mainline,eeshangarg/oh-mainline,campbe13/openhatch,ojengwa/oh-mainline,nirmeshk/oh-mainline,ojengwa/oh-mainline,campbe13/openhatch,onceuponatimeforever/oh-mainline,moijes12/oh-mainline,openhatch/oh-mainline,Changaco/oh-mainline,ojengwa/oh-mainline,openhatch/oh-mainline,waseem18/oh-mainline,Changaco/oh-mainline,nirmeshk/oh-mainline,jledbetter/openhatch,SnappleCap/oh-mainline,willingc/oh-mainline,vipul-sharma20/oh-mainline,eeshangarg/oh-mainline,Changaco/oh-mainline,willingc/oh-mainline,heeraj123/oh-mainline,ehashman/oh-mainline,nirmeshk/oh-mainline,jledbetter/openhatch,Changaco/oh-mainline,eeshangarg/oh-mainline
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=['all@openhatch.org']
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
Correct the way we set ADMINS
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
|
<commit_before>from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=['all@openhatch.org']
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
<commit_msg>Correct the way we set ADMINS<commit_after>
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
|
from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=['all@openhatch.org']
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
Correct the way we set ADMINSfrom settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
|
<commit_before>from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=['all@openhatch.org']
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
<commit_msg>Correct the way we set ADMINS<commit_after>from settings import *
OHLOH_API_KEY='SXvLaGPJFaKXQC0VOocAg'
DEBUG=False
ADMINS=[
('All OH devs', 'devel@openhatch.org'),
]
INVITE_MODE=True # Enabled on production site
INVITATIONS_PER_USER=20
TEMPLTE_DEBUG=False
|
3ac0deb7a51043b706ec242bad10a2be77794983
|
coffee/config.py
|
coffee/config.py
|
import os
app_config = {
'DEBUG': bool(os.getenv('DEBUG', True)),
'REDIS_DB': int(os.getenv('REDIS_DB', 1)),
'REDIS_HOST': os.getenv('REDIS_HOST', '127.0.0.1'),
'REDIS_PORT': int(os.getenv('REDIS_PORT', 6379)),
'REDIS_PW': os.getenv('REDIS_PW', None),
'SERVER_HOST': os.getenv('SERVER_HOST', '0.0.0.0'),
'SERVER_PORT': int(os.getenv('SERVER_PORT', 5000))
}
|
import os
app_config = {
'DEBUG': bool(os.getenv('DEBUG', True)),
'REDIS_DB': int(os.getenv('REDIS_DB', 1)),
'REDIS_HOST': os.getenv('REDIS_HOST', '127.0.0.1'),
'REDIS_PORT': int(os.getenv('REDIS_PORT', 6379)),
'REDIS_PW': os.getenv('REDIS_PW', None),
'SERVER_HOST': os.getenv('SERVER_HOST', '127.0.0.1'),
'SERVER_PORT': int(os.getenv('SERVER_PORT', 5000))
}
|
Set default server host address to 127.0.0.1
|
Set default server host address to 127.0.0.1
|
Python
|
mit
|
webkom/coffee,webkom/coffee
|
import os
app_config = {
'DEBUG': bool(os.getenv('DEBUG', True)),
'REDIS_DB': int(os.getenv('REDIS_DB', 1)),
'REDIS_HOST': os.getenv('REDIS_HOST', '127.0.0.1'),
'REDIS_PORT': int(os.getenv('REDIS_PORT', 6379)),
'REDIS_PW': os.getenv('REDIS_PW', None),
'SERVER_HOST': os.getenv('SERVER_HOST', '0.0.0.0'),
'SERVER_PORT': int(os.getenv('SERVER_PORT', 5000))
}
Set default server host address to 127.0.0.1
|
import os
app_config = {
'DEBUG': bool(os.getenv('DEBUG', True)),
'REDIS_DB': int(os.getenv('REDIS_DB', 1)),
'REDIS_HOST': os.getenv('REDIS_HOST', '127.0.0.1'),
'REDIS_PORT': int(os.getenv('REDIS_PORT', 6379)),
'REDIS_PW': os.getenv('REDIS_PW', None),
'SERVER_HOST': os.getenv('SERVER_HOST', '127.0.0.1'),
'SERVER_PORT': int(os.getenv('SERVER_PORT', 5000))
}
|
<commit_before>import os
app_config = {
'DEBUG': bool(os.getenv('DEBUG', True)),
'REDIS_DB': int(os.getenv('REDIS_DB', 1)),
'REDIS_HOST': os.getenv('REDIS_HOST', '127.0.0.1'),
'REDIS_PORT': int(os.getenv('REDIS_PORT', 6379)),
'REDIS_PW': os.getenv('REDIS_PW', None),
'SERVER_HOST': os.getenv('SERVER_HOST', '0.0.0.0'),
'SERVER_PORT': int(os.getenv('SERVER_PORT', 5000))
}
<commit_msg>Set default server host address to 127.0.0.1<commit_after>
|
import os
app_config = {
'DEBUG': bool(os.getenv('DEBUG', True)),
'REDIS_DB': int(os.getenv('REDIS_DB', 1)),
'REDIS_HOST': os.getenv('REDIS_HOST', '127.0.0.1'),
'REDIS_PORT': int(os.getenv('REDIS_PORT', 6379)),
'REDIS_PW': os.getenv('REDIS_PW', None),
'SERVER_HOST': os.getenv('SERVER_HOST', '127.0.0.1'),
'SERVER_PORT': int(os.getenv('SERVER_PORT', 5000))
}
|
import os
app_config = {
'DEBUG': bool(os.getenv('DEBUG', True)),
'REDIS_DB': int(os.getenv('REDIS_DB', 1)),
'REDIS_HOST': os.getenv('REDIS_HOST', '127.0.0.1'),
'REDIS_PORT': int(os.getenv('REDIS_PORT', 6379)),
'REDIS_PW': os.getenv('REDIS_PW', None),
'SERVER_HOST': os.getenv('SERVER_HOST', '0.0.0.0'),
'SERVER_PORT': int(os.getenv('SERVER_PORT', 5000))
}
Set default server host address to 127.0.0.1import os
app_config = {
'DEBUG': bool(os.getenv('DEBUG', True)),
'REDIS_DB': int(os.getenv('REDIS_DB', 1)),
'REDIS_HOST': os.getenv('REDIS_HOST', '127.0.0.1'),
'REDIS_PORT': int(os.getenv('REDIS_PORT', 6379)),
'REDIS_PW': os.getenv('REDIS_PW', None),
'SERVER_HOST': os.getenv('SERVER_HOST', '127.0.0.1'),
'SERVER_PORT': int(os.getenv('SERVER_PORT', 5000))
}
|
<commit_before>import os
app_config = {
'DEBUG': bool(os.getenv('DEBUG', True)),
'REDIS_DB': int(os.getenv('REDIS_DB', 1)),
'REDIS_HOST': os.getenv('REDIS_HOST', '127.0.0.1'),
'REDIS_PORT': int(os.getenv('REDIS_PORT', 6379)),
'REDIS_PW': os.getenv('REDIS_PW', None),
'SERVER_HOST': os.getenv('SERVER_HOST', '0.0.0.0'),
'SERVER_PORT': int(os.getenv('SERVER_PORT', 5000))
}
<commit_msg>Set default server host address to 127.0.0.1<commit_after>import os
app_config = {
'DEBUG': bool(os.getenv('DEBUG', True)),
'REDIS_DB': int(os.getenv('REDIS_DB', 1)),
'REDIS_HOST': os.getenv('REDIS_HOST', '127.0.0.1'),
'REDIS_PORT': int(os.getenv('REDIS_PORT', 6379)),
'REDIS_PW': os.getenv('REDIS_PW', None),
'SERVER_HOST': os.getenv('SERVER_HOST', '127.0.0.1'),
'SERVER_PORT': int(os.getenv('SERVER_PORT', 5000))
}
|
1557de38bcc9fa4099655c210d7e2daf7c19d715
|
task/models.py
|
task/models.py
|
from django.db import models
from django.conf import settings
class Task(models.Model):
title = models.CharField(max_length=50, unique=True)
created_at = models.DateField()
status = models.CharField(max_length=30, choices=settings.TASK_CHOICES)
def __unicode__(self): # pragma: no cover
return self.title
|
import datetime
from django.db import models
from django.conf import settings
class Task(models.Model):
    """A single to-do item; newest tasks come first (see ``Meta.ordering``)."""
    # Human-readable name; uniqueness is enforced at the database level.
    title = models.CharField(max_length=50, unique=True)
    # Set automatically when the row is first inserted; not user-editable.
    created_at = models.DateTimeField(auto_now_add=True)
    # Workflow state; valid values are defined by settings.TASK_CHOICES.
    status = models.CharField(max_length=30, choices=settings.TASK_CHOICES)
    class Meta:
        # Default queryset order: most recently created tasks first.
        ordering = ('-created_at',)
    def __unicode__(self):  # pragma: no cover
        # Python 2 style display name (admin, shell, etc.).
        return self.title
|
Set order getting the list of tasks
|
Set order getting the list of tasks
|
Python
|
mit
|
rosadurante/to_do,rosadurante/to_do
|
from django.db import models
from django.conf import settings
class Task(models.Model):
title = models.CharField(max_length=50, unique=True)
created_at = models.DateField()
status = models.CharField(max_length=30, choices=settings.TASK_CHOICES)
def __unicode__(self): # pragma: no cover
return self.title
Set order getting the list of tasks
|
import datetime
from django.db import models
from django.conf import settings
class Task(models.Model):
title = models.CharField(max_length=50, unique=True)
created_at = models.DateTimeField(auto_now_add=True)
status = models.CharField(max_length=30, choices=settings.TASK_CHOICES)
class Meta:
ordering = ('-created_at',)
def __unicode__(self): # pragma: no cover
return self.title
|
<commit_before>from django.db import models
from django.conf import settings
class Task(models.Model):
title = models.CharField(max_length=50, unique=True)
created_at = models.DateField()
status = models.CharField(max_length=30, choices=settings.TASK_CHOICES)
def __unicode__(self): # pragma: no cover
return self.title
<commit_msg>Set order getting the list of tasks<commit_after>
|
import datetime
from django.db import models
from django.conf import settings
class Task(models.Model):
title = models.CharField(max_length=50, unique=True)
created_at = models.DateTimeField(auto_now_add=True)
status = models.CharField(max_length=30, choices=settings.TASK_CHOICES)
class Meta:
ordering = ('-created_at',)
def __unicode__(self): # pragma: no cover
return self.title
|
from django.db import models
from django.conf import settings
class Task(models.Model):
title = models.CharField(max_length=50, unique=True)
created_at = models.DateField()
status = models.CharField(max_length=30, choices=settings.TASK_CHOICES)
def __unicode__(self): # pragma: no cover
return self.title
Set order getting the list of tasksimport datetime
from django.db import models
from django.conf import settings
class Task(models.Model):
title = models.CharField(max_length=50, unique=True)
created_at = models.DateTimeField(auto_now_add=True)
status = models.CharField(max_length=30, choices=settings.TASK_CHOICES)
class Meta:
ordering = ('-created_at',)
def __unicode__(self): # pragma: no cover
return self.title
|
<commit_before>from django.db import models
from django.conf import settings
class Task(models.Model):
title = models.CharField(max_length=50, unique=True)
created_at = models.DateField()
status = models.CharField(max_length=30, choices=settings.TASK_CHOICES)
def __unicode__(self): # pragma: no cover
return self.title
<commit_msg>Set order getting the list of tasks<commit_after>import datetime
from django.db import models
from django.conf import settings
class Task(models.Model):
title = models.CharField(max_length=50, unique=True)
created_at = models.DateTimeField(auto_now_add=True)
status = models.CharField(max_length=30, choices=settings.TASK_CHOICES)
class Meta:
ordering = ('-created_at',)
def __unicode__(self): # pragma: no cover
return self.title
|
172b2aaf505b1971bceb934e5e3d9e5dce1acbb1
|
api/views.py
|
api/views.py
|
# coding=utf-8
from rest_framework import viewsets
from .models import AirCondition, AirAverage
from .serializers import AirAverageSerializer, AirConditionSerializer
class AirConditionViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirCondition.objects.all().order_by('-time')[:24] # 24 hours
serializer_class = AirConditionSerializer
|
# coding=utf-8
from rest_framework import viewsets
from .models import AirCondition, AirAverage
from .serializers import AirAverageSerializer, AirConditionSerializer
class AirConditionViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirCondition.objects.all().order_by('-time')[:24] # 24 hours
serializer_class = AirConditionSerializer
class AirAverageViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirAverage.objects.all().order_by('-from_time')[:10] # 5 days
serializer_class = AirAverageSerializer
|
Add pm2.5 avg view api
|
Add pm2.5 avg view api
|
Python
|
mit
|
banbanchs/leda,banbanchs/leda,banbanchs/leda
|
# coding=utf-8
from rest_framework import viewsets
from .models import AirCondition, AirAverage
from .serializers import AirAverageSerializer, AirConditionSerializer
class AirConditionViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirCondition.objects.all().order_by('-time')[:24] # 24 hours
serializer_class = AirConditionSerializerAdd pm2.5 avg view api
|
# coding=utf-8
from rest_framework import viewsets
from .models import AirCondition, AirAverage
from .serializers import AirAverageSerializer, AirConditionSerializer
class AirConditionViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirCondition.objects.all().order_by('-time')[:24] # 24 hours
serializer_class = AirConditionSerializer
class AirAverageViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirAverage.objects.all().order_by('-from_time')[:10] # 5 days
serializer_class = AirAverageSerializer
|
<commit_before># coding=utf-8
from rest_framework import viewsets
from .models import AirCondition, AirAverage
from .serializers import AirAverageSerializer, AirConditionSerializer
class AirConditionViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirCondition.objects.all().order_by('-time')[:24] # 24 hours
serializer_class = AirConditionSerializer<commit_msg>Add pm2.5 avg view api<commit_after>
|
# coding=utf-8
from rest_framework import viewsets
from .models import AirCondition, AirAverage
from .serializers import AirAverageSerializer, AirConditionSerializer
class AirConditionViewSets(viewsets.ReadOnlyModelViewSet):
    """Read-only API exposing the most recent hourly air-quality readings."""
    # Newest first; the slice caps the response at the last 24 entries.
    queryset = AirCondition.objects.all().order_by('-time')[:24]  # 24 hours
    serializer_class = AirConditionSerializer
class AirAverageViewSets(viewsets.ReadOnlyModelViewSet):
    """Read-only API exposing recent PM2.5 averaged intervals."""
    # Newest first; 10 entries ≈ 5 days per the original inline note.
    queryset = AirAverage.objects.all().order_by('-from_time')[:10]  # 5 days
    serializer_class = AirAverageSerializer
|
# coding=utf-8
from rest_framework import viewsets
from .models import AirCondition, AirAverage
from .serializers import AirAverageSerializer, AirConditionSerializer
class AirConditionViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirCondition.objects.all().order_by('-time')[:24] # 24 hours
serializer_class = AirConditionSerializerAdd pm2.5 avg view api# coding=utf-8
from rest_framework import viewsets
from .models import AirCondition, AirAverage
from .serializers import AirAverageSerializer, AirConditionSerializer
class AirConditionViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirCondition.objects.all().order_by('-time')[:24] # 24 hours
serializer_class = AirConditionSerializer
class AirAverageViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirAverage.objects.all().order_by('-from_time')[:10] # 5 days
serializer_class = AirAverageSerializer
|
<commit_before># coding=utf-8
from rest_framework import viewsets
from .models import AirCondition, AirAverage
from .serializers import AirAverageSerializer, AirConditionSerializer
class AirConditionViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirCondition.objects.all().order_by('-time')[:24] # 24 hours
serializer_class = AirConditionSerializer<commit_msg>Add pm2.5 avg view api<commit_after># coding=utf-8
from rest_framework import viewsets
from .models import AirCondition, AirAverage
from .serializers import AirAverageSerializer, AirConditionSerializer
class AirConditionViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirCondition.objects.all().order_by('-time')[:24] # 24 hours
serializer_class = AirConditionSerializer
class AirAverageViewSets(viewsets.ReadOnlyModelViewSet):
queryset = AirAverage.objects.all().order_by('-from_time')[:10] # 5 days
serializer_class = AirAverageSerializer
|
4de03c57bf4f4995eb8c8859e0a40b7c5fc9942b
|
desktop/libs/libzookeeper/src/libzookeeper/models.py
|
desktop/libs/libzookeeper/src/libzookeeper/models.py
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kazoo.client import KazooClient
from libzookeeper.conf import PRINCIPAL_NAME
def get_children_data(ensemble, namespace, read_only=True):
zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=PRINCIPAL_NAME.get())
zk.start()
children_data = []
children = zk.get_children(namespace)
for node in children:
data, stat = zk.get("%s/%s" % (namespace, node))
children_data.append(data)
zk.stop()
return children_data
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kazoo.client import KazooClient
from hadoop import cluster
from desktop.lib.exceptions_renderable import PopupException
from libzookeeper.conf import PRINCIPAL_NAME
def get_children_data(ensemble, namespace, read_only=True):
hdfs = cluster.get_hdfs()
if hdfs is None:
raise PopupException(_('No [hdfs] configured in hue.ini.'))
if hdfs.security_enabled:
sasl_server_principal = PRINCIPAL_NAME.get()
else:
sasl_server_principal = None
zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=sasl_server_principal)
zk.start()
children_data = []
children = zk.get_children(namespace)
for node in children:
data, stat = zk.get("%s/%s" % (namespace, node))
children_data.append(data)
zk.stop()
return children_data
|
Enable Kerberos automatically based on HDFS security
|
[libzookeeper] Enable Kerberos automatically based on HDFS security
We don't need another property that way, and Kerberos is an all-or-nothing
setup.
Even if HDFS is not used in Hue, the default hue.ini has security set
to false.
|
Python
|
apache-2.0
|
pratikmallya/hue,jjmleiro/hue,lumig242/Hue-Integration-with-CDAP,cloudera/hue,pratikmallya/hue,xiangel/hue,Peddle/hue,x303597316/hue,cloudera/hue,rahul67/hue,kawamon/hue,yongshengwang/hue,MobinRanjbar/hue,x303597316/hue,xq262144/hue,jayceyxc/hue,mapr/hue,yongshengwang/hue,pratikmallya/hue,sanjeevtripurari/hue,lumig242/Hue-Integration-with-CDAP,jayceyxc/hue,cloudera/hue,jounex/hue,Peddle/hue,ChenJunor/hue,jayceyxc/hue,kawamon/hue,Peddle/hue,vmax-feihu/hue,MobinRanjbar/hue,vmax-feihu/hue,ahmed-mahran/hue,ahmed-mahran/hue,kawamon/hue,kawamon/hue,GitHublong/hue,hdinsight/hue,Peddle/hue,kawamon/hue,xiangel/hue,yoer/hue,pratikmallya/hue,yongshengwang/hue,kawamon/hue,cloudera/hue,jounex/hue,cloudera/hue,rahul67/hue,javachengwc/hue,azureplus/hue,kawamon/hue,vmax-feihu/hue,fangxingli/hue,hdinsight/hue,jounex/hue,cloudera/hue,sanjeevtripurari/hue,rahul67/hue,lumig242/Hue-Integration-with-CDAP,jjmleiro/hue,kawamon/hue,cloudera/hue,javachengwc/hue,cloudera/hue,yongshengwang/hue,jjmleiro/hue,mapr/hue,yoer/hue,yongshengwang/hue,todaychi/hue,kawamon/hue,GitHublong/hue,xq262144/hue,jounex/hue,kawamon/hue,ChenJunor/hue,cloudera/hue,x303597316/hue,jayceyxc/hue,todaychi/hue,jounex/hue,GitHublong/hue,hdinsight/hue,yoer/hue,hdinsight/hue,fangxingli/hue,kawamon/hue,lumig242/Hue-Integration-with-CDAP,x303597316/hue,ahmed-mahran/hue,azureplus/hue,xiangel/hue,todaychi/hue,jjmleiro/hue,jounex/hue,cloudera/hue,fangxingli/hue,vmax-feihu/hue,yongshengwang/hue,hdinsight/hue,kawamon/hue,ChenJunor/hue,todaychi/hue,lumig242/Hue-Integration-with-CDAP,rahul67/hue,javachengwc/hue,todaychi/hue,todaychi/hue,rahul67/hue,kawamon/hue,GitHublong/hue,yoer/hue,fangxingli/hue,javachengwc/hue,todaychi/hue,cloudera/hue,cloudera/hue,ChenJunor/hue,jayceyxc/hue,jayceyxc/hue,Peddle/hue,sanjeevtripurari/hue,lumig242/Hue-Integration-with-CDAP,xq262144/hue,pratikmallya/hue,GitHublong/hue,x303597316/hue,jayceyxc/hue,ahmed-mahran/hue,MobinRanjbar/hue,MobinRanjbar/hue,vmax-feihu/hue,fangxingli/hue,mapr/hue,xiangel/hue,kaw
amon/hue,lumig242/Hue-Integration-with-CDAP,mapr/hue,mapr/hue,fangxingli/hue,ChenJunor/hue,jayceyxc/hue,jjmleiro/hue,GitHublong/hue,todaychi/hue,pratikmallya/hue,cloudera/hue,ahmed-mahran/hue,cloudera/hue,xq262144/hue,rahul67/hue,jayceyxc/hue,azureplus/hue,jjmleiro/hue,MobinRanjbar/hue,javachengwc/hue,azureplus/hue,mapr/hue,jounex/hue,xiangel/hue,vmax-feihu/hue,hdinsight/hue,sanjeevtripurari/hue,ahmed-mahran/hue,yoer/hue,ahmed-mahran/hue,pratikmallya/hue,hdinsight/hue,cloudera/hue,ChenJunor/hue,xiangel/hue,kawamon/hue,xq262144/hue,yongshengwang/hue,jounex/hue,hdinsight/hue,MobinRanjbar/hue,rahul67/hue,Peddle/hue,kawamon/hue,xq262144/hue,azureplus/hue,lumig242/Hue-Integration-with-CDAP,vmax-feihu/hue,Peddle/hue,xiangel/hue,sanjeevtripurari/hue,sanjeevtripurari/hue,x303597316/hue,MobinRanjbar/hue,javachengwc/hue,xq262144/hue,fangxingli/hue,x303597316/hue,mapr/hue,xq262144/hue,pratikmallya/hue,sanjeevtripurari/hue,javachengwc/hue,sanjeevtripurari/hue,yoer/hue,rahul67/hue,kawamon/hue,azureplus/hue,jjmleiro/hue,Peddle/hue,ChenJunor/hue,MobinRanjbar/hue,cloudera/hue,yongshengwang/hue,azureplus/hue,yoer/hue,ChenJunor/hue,cloudera/hue,vmax-feihu/hue,cloudera/hue,azureplus/hue,yoer/hue,jjmleiro/hue,javachengwc/hue,GitHublong/hue,Peddle/hue,ahmed-mahran/hue,todaychi/hue,x303597316/hue,jjmleiro/hue,xiangel/hue,lumig242/Hue-Integration-with-CDAP,kawamon/hue,fangxingli/hue,GitHublong/hue,xq262144/hue
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kazoo.client import KazooClient
from libzookeeper.conf import PRINCIPAL_NAME
def get_children_data(ensemble, namespace, read_only=True):
zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=PRINCIPAL_NAME.get())
zk.start()
children_data = []
children = zk.get_children(namespace)
for node in children:
data, stat = zk.get("%s/%s" % (namespace, node))
children_data.append(data)
zk.stop()
return children_data
[libzookeeper] Enable Kerberos automatically based on HDFS security
We don't need another property that way and Kerberos is a all or nothing
setup.
Even if HDFS is not used in Hue, the default hue.ini has security set
to false.
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kazoo.client import KazooClient
from hadoop import cluster
from desktop.lib.exceptions_renderable import PopupException
from libzookeeper.conf import PRINCIPAL_NAME
def get_children_data(ensemble, namespace, read_only=True):
  """Return the data payload of every child znode under ``namespace``.

  Connects to the ZooKeeper ``ensemble`` (comma-separated host:port list).
  Kerberos/SASL is enabled automatically when the configured HDFS cluster
  is security-enabled — security is all-or-nothing in this setup.

  Raises PopupException when no [hdfs] section is configured in hue.ini.
  """
  hdfs = cluster.get_hdfs()
  if hdfs is None:
    # Bug fix: ``_`` (gettext) is not imported in this module, so the
    # original ``_('...')`` call raised NameError instead of the intended
    # PopupException.  Use the plain message string.
    raise PopupException('No [hdfs] configured in hue.ini.')

  if hdfs.security_enabled:
    # Mirror HDFS security: use the configured Kerberos principal.
    sasl_server_principal = PRINCIPAL_NAME.get()
  else:
    sasl_server_principal = None

  zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=sasl_server_principal)
  zk.start()

  children_data = []
  try:
    for node in zk.get_children(namespace):
      # stat is unused; kazoo's get() returns a (data, ZnodeStat) pair.
      data, stat = zk.get("%s/%s" % (namespace, node))
      children_data.append(data)
  finally:
    # Always release the connection, even if a read above fails;
    # the original leaked the session on error.
    zk.stop()

  return children_data
|
<commit_before>#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kazoo.client import KazooClient
from libzookeeper.conf import PRINCIPAL_NAME
def get_children_data(ensemble, namespace, read_only=True):
zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=PRINCIPAL_NAME.get())
zk.start()
children_data = []
children = zk.get_children(namespace)
for node in children:
data, stat = zk.get("%s/%s" % (namespace, node))
children_data.append(data)
zk.stop()
return children_data
<commit_msg>[libzookeeper] Enable Kerberos automatically based on HDFS security
We don't need another property that way and Kerberos is a all or nothing
setup.
Even if HDFS is not used in Hue, the default hue.ini has security set
to false.<commit_after>
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kazoo.client import KazooClient
from hadoop import cluster
from desktop.lib.exceptions_renderable import PopupException
from libzookeeper.conf import PRINCIPAL_NAME
def get_children_data(ensemble, namespace, read_only=True):
hdfs = cluster.get_hdfs()
if hdfs is None:
raise PopupException(_('No [hdfs] configured in hue.ini.'))
if hdfs.security_enabled:
sasl_server_principal = PRINCIPAL_NAME.get()
else:
sasl_server_principal = None
zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=sasl_server_principal)
zk.start()
children_data = []
children = zk.get_children(namespace)
for node in children:
data, stat = zk.get("%s/%s" % (namespace, node))
children_data.append(data)
zk.stop()
return children_data
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kazoo.client import KazooClient
from libzookeeper.conf import PRINCIPAL_NAME
def get_children_data(ensemble, namespace, read_only=True):
zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=PRINCIPAL_NAME.get())
zk.start()
children_data = []
children = zk.get_children(namespace)
for node in children:
data, stat = zk.get("%s/%s" % (namespace, node))
children_data.append(data)
zk.stop()
return children_data
[libzookeeper] Enable Kerberos automatically based on HDFS security
We don't need another property that way and Kerberos is a all or nothing
setup.
Even if HDFS is not used in Hue, the default hue.ini has security set
to false.#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kazoo.client import KazooClient
from hadoop import cluster
from desktop.lib.exceptions_renderable import PopupException
from libzookeeper.conf import PRINCIPAL_NAME
def get_children_data(ensemble, namespace, read_only=True):
hdfs = cluster.get_hdfs()
if hdfs is None:
raise PopupException(_('No [hdfs] configured in hue.ini.'))
if hdfs.security_enabled:
sasl_server_principal = PRINCIPAL_NAME.get()
else:
sasl_server_principal = None
zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=sasl_server_principal)
zk.start()
children_data = []
children = zk.get_children(namespace)
for node in children:
data, stat = zk.get("%s/%s" % (namespace, node))
children_data.append(data)
zk.stop()
return children_data
|
<commit_before>#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kazoo.client import KazooClient
from libzookeeper.conf import PRINCIPAL_NAME
def get_children_data(ensemble, namespace, read_only=True):
zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=PRINCIPAL_NAME.get())
zk.start()
children_data = []
children = zk.get_children(namespace)
for node in children:
data, stat = zk.get("%s/%s" % (namespace, node))
children_data.append(data)
zk.stop()
return children_data
<commit_msg>[libzookeeper] Enable Kerberos automatically based on HDFS security
We don't need another property that way and Kerberos is a all or nothing
setup.
Even if HDFS is not used in Hue, the default hue.ini has security set
to false.<commit_after>#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from kazoo.client import KazooClient
from hadoop import cluster
from desktop.lib.exceptions_renderable import PopupException
from libzookeeper.conf import PRINCIPAL_NAME
def get_children_data(ensemble, namespace, read_only=True):
hdfs = cluster.get_hdfs()
if hdfs is None:
raise PopupException(_('No [hdfs] configured in hue.ini.'))
if hdfs.security_enabled:
sasl_server_principal = PRINCIPAL_NAME.get()
else:
sasl_server_principal = None
zk = KazooClient(hosts=ensemble, read_only=read_only, sasl_server_principal=sasl_server_principal)
zk.start()
children_data = []
children = zk.get_children(namespace)
for node in children:
data, stat = zk.get("%s/%s" % (namespace, node))
children_data.append(data)
zk.stop()
return children_data
|
5bd9af7c35603cca49303f56096bc279234e547d
|
ci/fix_paths.py
|
ci/fix_paths.py
|
import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib()
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
if platform.startswith('win'):
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
|
import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True)
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
if platform.startswith('win'):
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
|
Use plat_specific site-packages dir in CI script
|
Use plat_specific site-packages dir in CI script
|
Python
|
bsd-3-clause
|
h5py/h5py,h5py/h5py,h5py/h5py
|
import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib()
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
if platform.startswith('win'):
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
Use plat_specific site-packages dir in CI script
|
import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True)
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
if platform.startswith('win'):
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
|
<commit_before>import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib()
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
if platform.startswith('win'):
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
<commit_msg>Use plat_specific site-packages dir in CI script<commit_after>
|
import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
    """Copy the HDF5 runtime DLLs into the installed h5py package.

    On Windows, every ``*.dll`` under ``$HDF5_DIR/lib`` is copied into the
    ``h5py`` directory inside site-packages so the compiled extension
    modules can locate them at import time.  On other platforms the
    function only logs the resolved paths.
    """
    # Same helper Tox uses to resolve site-packages (Apr 2019).
    site_pkgs = distutils.sysconfig.get_python_lib(plat_specific=True)
    print("site packages dir:", site_pkgs)

    hdf5_root = os.environ.get("HDF5_DIR")
    print("HDF5_DIR", hdf5_root)

    if platform.startswith('win'):
        dest_dir = pjoin(site_pkgs, 'h5py')
        for dll in glob(pjoin(hdf5_root, 'lib/*.dll')):
            copy(dll, pjoin(dest_dir, basename(dll)))
            print("Copied", dll)

    print("In installed h5py:", os.listdir(pjoin(site_pkgs, 'h5py')))
# Entry point: allows running this module directly as a CI helper script.
if __name__ == '__main__':
    main()
|
import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib()
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
if platform.startswith('win'):
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
Use plat_specific site-packages dir in CI scriptimport distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True)
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
if platform.startswith('win'):
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
|
<commit_before>import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib()
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
if platform.startswith('win'):
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
<commit_msg>Use plat_specific site-packages dir in CI script<commit_after>import distutils.sysconfig
from glob import glob
import os
from os.path import join as pjoin, basename
from shutil import copy
from sys import platform
def main():
"""
Copy HDF5 DLLs into installed h5py package
"""
# This is the function Tox also uses to locate site-packages (Apr 2019)
sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True)
print("site packages dir:", sitepackagesdir)
hdf5_path = os.environ.get("HDF5_DIR")
print("HDF5_DIR", hdf5_path)
if platform.startswith('win'):
for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
copy(f, pjoin(sitepackagesdir, 'h5py', basename(f)))
print("Copied", f)
print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py')))
if __name__ == '__main__':
main()
|
2405af4942781f997bf93501850e41529d4be072
|
py/test/selenium/webdriver/common/proxy_tests.py
|
py/test/selenium/webdriver/common/proxy_tests.py
|
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'manual',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
|
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'MANUAL',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
|
Fix test as well :)
|
DanielWagnerHall: Fix test as well :)
git-svn-id: 4179480af2c2519a5eb5e1e9b541cbdf5cf27696@17825 07704840-8298-11de-bf8c-fd130f914ac9
|
Python
|
apache-2.0
|
virajs/selenium-1,virajs/selenium-1,winhamwr/selenium,virajs/selenium-1,virajs/selenium-1,winhamwr/selenium,virajs/selenium-1,winhamwr/selenium,winhamwr/selenium,winhamwr/selenium,winhamwr/selenium,virajs/selenium-1,winhamwr/selenium,virajs/selenium-1,virajs/selenium-1,virajs/selenium-1,winhamwr/selenium
|
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'manual',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
DanielWagnerHall: Fix test as well :)
git-svn-id: 4179480af2c2519a5eb5e1e9b541cbdf5cf27696@17825 07704840-8298-11de-bf8c-fd130f914ac9
|
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'MANUAL',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
|
<commit_before>#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'manual',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
<commit_msg>DanielWagnerHall: Fix test as well :)
git-svn-id: 4179480af2c2519a5eb5e1e9b541cbdf5cf27696@17825 07704840-8298-11de-bf8c-fd130f914ac9<commit_after>
|
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'MANUAL',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
|
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'manual',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
DanielWagnerHall: Fix test as well :)
git-svn-id: 4179480af2c2519a5eb5e1e9b541cbdf5cf27696@17825 07704840-8298-11de-bf8c-fd130f914ac9#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'MANUAL',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
|
<commit_before>#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'manual',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
<commit_msg>DanielWagnerHall: Fix test as well :)
git-svn-id: 4179480af2c2519a5eb5e1e9b541cbdf5cf27696@17825 07704840-8298-11de-bf8c-fd130f914ac9<commit_after>#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from selenium.webdriver.common.proxy import Proxy
class ProxyTests(unittest.TestCase):
def testCanAddToDesiredCapabilities(self):
desired_capabilities = {}
proxy = Proxy()
proxy.http_proxy = 'some.url:1234'
proxy.add_to_capabilities(desired_capabilities)
expected_capabilities = {
'proxy': {
'proxyType': 'MANUAL',
'httpProxy': 'some.url:1234'
}
}
self.assertEqual(expected_capabilities, desired_capabilities)
|
3a89181d0adb53a2a3d428485d5e3deaeb950a02
|
fixedwidthwriter/__init__.py
|
fixedwidthwriter/__init__.py
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
_row = []
for field in self.fields:
try:
_key, _width, _options = field
except ValueError:
_key, _width = field
_options = {}
_value = rowdict[_key]
_decimal_spaces = _options.get('decimal_spaces', 0)
if _decimal_spaces:
_value = unicode(Decimal(_value)
.quantize(Decimal(10)**-_decimal_spaces))
_part = '{0: {1}{2}}' \
.format(_value, _options.get('direction', '<'), _width)
_row.append(_part)
_row = ''.join(_row)
self.fd.write(_row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
Remove leading underscores from variables.
|
Remove leading underscores from variables.
|
Python
|
mit
|
ArthurPBressan/py-fixedwidthwriter,HardDiskD/py-fixedwidthwriter
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
_row = []
for field in self.fields:
try:
_key, _width, _options = field
except ValueError:
_key, _width = field
_options = {}
_value = rowdict[_key]
_decimal_spaces = _options.get('decimal_spaces', 0)
if _decimal_spaces:
_value = unicode(Decimal(_value)
.quantize(Decimal(10)**-_decimal_spaces))
_part = '{0: {1}{2}}' \
.format(_value, _options.get('direction', '<'), _width)
_row.append(_part)
_row = ''.join(_row)
self.fd.write(_row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
Remove leading underscores from variables.
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
<commit_before># coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
_row = []
for field in self.fields:
try:
_key, _width, _options = field
except ValueError:
_key, _width = field
_options = {}
_value = rowdict[_key]
_decimal_spaces = _options.get('decimal_spaces', 0)
if _decimal_spaces:
_value = unicode(Decimal(_value)
.quantize(Decimal(10)**-_decimal_spaces))
_part = '{0: {1}{2}}' \
.format(_value, _options.get('direction', '<'), _width)
_row.append(_part)
_row = ''.join(_row)
self.fd.write(_row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
<commit_msg>Remove leading underscores from variables.<commit_after>
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
_row = []
for field in self.fields:
try:
_key, _width, _options = field
except ValueError:
_key, _width = field
_options = {}
_value = rowdict[_key]
_decimal_spaces = _options.get('decimal_spaces', 0)
if _decimal_spaces:
_value = unicode(Decimal(_value)
.quantize(Decimal(10)**-_decimal_spaces))
_part = '{0: {1}{2}}' \
.format(_value, _options.get('direction', '<'), _width)
_row.append(_part)
_row = ''.join(_row)
self.fd.write(_row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
Remove leading underscores from variables.# coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
<commit_before># coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
_row = []
for field in self.fields:
try:
_key, _width, _options = field
except ValueError:
_key, _width = field
_options = {}
_value = rowdict[_key]
_decimal_spaces = _options.get('decimal_spaces', 0)
if _decimal_spaces:
_value = unicode(Decimal(_value)
.quantize(Decimal(10)**-_decimal_spaces))
_part = '{0: {1}{2}}' \
.format(_value, _options.get('direction', '<'), _width)
_row.append(_part)
_row = ''.join(_row)
self.fd.write(_row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
<commit_msg>Remove leading underscores from variables.<commit_after># coding: utf-8
from decimal import Decimal
class FixedWidthWriter():
def __init__(self, fd, fields, line_ending='linux'):
self.fd = fd
self.fields = fields
if line_ending == 'linux':
self.line_ending = '\n'
elif line_ending == 'windows':
self.line_ending = '\r\n'
else:
raise ValueError('Only windows or linux line endings supported')
def writerow(self, rowdict):
row = []
for field in self.fields:
try:
key, width, options = field
except ValueError:
key, width = field
options = {}
value = rowdict[key]
decimal_spaces = options.get('decimal_spaces', 0)
if decimal_spaces:
value = unicode(Decimal(value)
.quantize(Decimal(10)**-decimal_spaces))
part = '{0: {1}{2}}' \
.format(value, options.get('direction', '<'), width)
row.append(part)
row = ''.join(row)
self.fd.write(row + self.line_ending)
def writerows(self, rowdicts):
for rowdict in rowdicts:
self.writerow(rowdict)
|
7579cc3058ad172cb058fbefd43f756a2316e256
|
examples/modelzoo/download_model.py
|
examples/modelzoo/download_model.py
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
descriptor='Download a Caffe reference model')
parser.add_argument('model_type',
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
description='Download a Caffe reference model')
parser.add_argument('model_type', choices=('alexnet', 'caffenet', 'googlenet'),
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
|
Fix argparse of caffe model download script
|
Fix argparse of caffe model download script
|
Python
|
mit
|
bayerj/chainer,kashif/chainer,AlpacaDB/chainer,AlpacaDB/chainer,kiyukuta/chainer,okuta/chainer,umitanuki/chainer,tkerola/chainer,aonotas/chainer,cupy/cupy,ktnyt/chainer,kikusu/chainer,tscohen/chainer,wkentaro/chainer,okuta/chainer,1986ks/chainer,Kaisuke5/chainer,muupan/chainer,kikusu/chainer,okuta/chainer,ktnyt/chainer,chainer/chainer,keisuke-umezawa/chainer,hvy/chainer,niboshi/chainer,chainer/chainer,wkentaro/chainer,minhpqn/chainer,hvy/chainer,cupy/cupy,chainer/chainer,wavelets/chainer,woodshop/chainer,niboshi/chainer,t-abe/chainer,ikasumi/chainer,kuwa32/chainer,ysekky/chainer,ytoyama/yans_chainer_hackathon,ktnyt/chainer,sinhrks/chainer,jfsantos/chainer,cupy/cupy,elviswf/chainer,jnishi/chainer,niboshi/chainer,benob/chainer,tigerneil/chainer,sinhrks/chainer,jnishi/chainer,jnishi/chainer,yanweifu/chainer,niboshi/chainer,keisuke-umezawa/chainer,anaruse/chainer,ktnyt/chainer,chainer/chainer,muupan/chainer,woodshop/complex-chainer,masia02/chainer,wkentaro/chainer,hidenori-t/chainer,delta2323/chainer,tereka114/chainer,hvy/chainer,pfnet/chainer,wkentaro/chainer,jnishi/chainer,keisuke-umezawa/chainer,laysakura/chainer,hvy/chainer,t-abe/chainer,rezoo/chainer,okuta/chainer,cemoody/chainer,truongdq/chainer,keisuke-umezawa/chainer,sou81821/chainer,ronekko/chainer,cupy/cupy,benob/chainer,truongdq/chainer
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
descriptor='Download a Caffe reference model')
parser.add_argument('model_type',
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
Fix argparse of caffe model download script
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
description='Download a Caffe reference model')
parser.add_argument('model_type', choices=('alexnet', 'caffenet', 'googlenet'),
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
|
<commit_before>#!/usr/bin/env python
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
descriptor='Download a Caffe reference model')
parser.add_argument('model_type',
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
<commit_msg>Fix argparse of caffe model download script<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
description='Download a Caffe reference model')
parser.add_argument('model_type', choices=('alexnet', 'caffenet', 'googlenet'),
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
descriptor='Download a Caffe reference model')
parser.add_argument('model_type',
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
Fix argparse of caffe model download script#!/usr/bin/env python
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
description='Download a Caffe reference model')
parser.add_argument('model_type', choices=('alexnet', 'caffenet', 'googlenet'),
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
|
<commit_before>#!/usr/bin/env python
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
descriptor='Download a Caffe reference model')
parser.add_argument('model_type',
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
<commit_msg>Fix argparse of caffe model download script<commit_after>#!/usr/bin/env python
from __future__ import print_function
import argparse
import six
parser = argparse.ArgumentParser(
description='Download a Caffe reference model')
parser.add_argument('model_type', choices=('alexnet', 'caffenet', 'googlenet'),
help='Model type (alexnet, caffenet, googlenet)')
args = parser.parse_args()
if args.model_type == 'alexnet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel'
name = 'bvlc_alexnet.caffemodel'
elif args.model_type == 'caffenet':
url = 'http://dl.caffe.berkeleyvision.org/' \
'bvlc_reference_caffenet.caffemodel'
name = 'bvlc_reference_caffenet.caffemodel'
elif args.model_type == 'googlenet':
url = 'http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel'
name = 'bvlc_googlenet.caffemodel'
else:
raise RuntimeError('Invalid model type. Choose from '
'alexnet, caffenet, and googlenet.')
print('Downloading model file...')
six.moves.urllib.request.urlretrieve(url, name)
print('Done')
|
da2e34ca3371f0898df8b3181ba98132bd9a26e4
|
txircd/modbase.py
|
txircd/modbase.py
|
# The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
|
# The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
	# Base class for bot modules. Exists so every module is guaranteed to
	# have a hook() entry point (see the file header comment).
	def hook(self, base):
		# Store the IRCd instance and return self so the call can be chained.
		self.ircd = base
		return self
class Mode(object):
	# Base class for modes. Subclasses override only the hooks they need;
	# the defaults here are permissive no-ops.
	def hook(self, base):
		# Store the IRCd instance and return self so the call can be chained.
		self.ircd = base
		return self
	def prefixSymbol(self):
		# None means this mode has no prefix symbol; presumably status modes
		# override this to return their prefix character — TODO confirm.
		return None
	def checkSet(self, channel, param):
		# Default: always allow setting this mode.
		return True
	def checkUnset(self, channel, param):
		# Default: always allow unsetting this mode.
		return True
	def onJoin(self, channel, user, params):
		# NOTE(review): the string "pass" appears to mean "no opinion, let
		# other handlers decide" — verify against the mode dispatcher.
		return "pass"
	def onMessage(self, sender, target, message):
		# Same "pass" convention as onJoin, wrapped in a list.
		return ["pass"]
	def onPart(self, channel, user, reason):
		pass
	def onTopicChange(self, channel, user, topic):
		pass
	def commandData(self, command, *args):
		pass
class Command(object):
	# Base class for commands.
	# BUG FIX: this was declared 'def Command(object):', which defined a
	# *function* (taking a parameter named 'object') whose nested defs were
	# never reachable as methods. It is clearly meant to mirror Module/Mode
	# above, so it is now a proper class.
	def hook(self, base):
		# Store the IRCd instance and return self so the call can be chained.
		self.ircd = base
		return self
	def onUse(self, user, params):
		# Override to implement the command; the default does nothing.
		pass
	def processParams(self, user, params):
		# Default parameter processing: hand user and params through unchanged.
		return {
			"user": user,
			"params": params
		}
|
Add a function for commands to process parameters
|
Add a function for commands to process parameters
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,DesertBus/txircd,Heufneutje/txircd
|
# The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
passAdd a function for commands to process parameters
|
# The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
}
|
<commit_before># The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass<commit_msg>Add a function for commands to process parameters<commit_after>
|
# The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
}
|
# The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
passAdd a function for commands to process parameters# The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
}
|
<commit_before># The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass<commit_msg>Add a function for commands to process parameters<commit_after># The purpose of this file is to provide base classes with the needed functions
# already defined; this allows us to guarantee that any exceptions raised
# during function calls are a problem with the module and not just that the
# particular function isn't defined.
class Module(object):
def hook(self, base):
self.ircd = base
return self
class Mode(object):
def hook(self, base):
self.ircd = base
return self
def prefixSymbol(self):
return None
def checkSet(self, channel, param):
return True
def checkUnset(self, channel, param):
return True
def onJoin(self, channel, user, params):
return "pass"
def onMessage(self, sender, target, message):
return ["pass"]
def onPart(self, channel, user, reason):
pass
def onTopicChange(self, channel, user, topic):
pass
def commandData(self, command, *args):
pass
def Command(object):
def hook(self, base):
self.ircd = base
return self
def onUse(self, user, params):
pass
def processParams(self, user, params):
return {
"user": user,
"params": params
}
|
2db334e452e2ee2d5f0cbc516dc6cb04b61e598d
|
yargy/labels.py
|
yargy/labels.py
|
GENDERS = ("masc", "femn", "neut", "Ms-f")
def gram_label(token, value, stack):
return value in token.grammemes
def gram_not_label(token, value, stack):
return not value in token.grammemes
def gender_match_label(token, index, stack, genders=GENDERS):
results = ((g in t.grammemes for g in genders) for t in (stack[index], token))
*case_token_genders, case_token_msf = next(results)
*candidate_token_genders, candidate_token_msf = next(results)
if not candidate_token_genders == case_token_genders:
if case_token_msf:
if any(candidate_token_genders[:2]):
return True
else:
return True
return False
def dictionary_label(token, values, stack):
return any((n in values) for n in token.forms)
LABELS_LOOKUP_MAP = {
"gram": gram_label,
"gram-not": gram_not_label,
"dictionary": dictionary_label,
"gender-match": gender_match_label,
}
|
# Gender grammemes checked by gender-match; "Ms-f" presumably marks
# common-gender words (either masc or femn) and "GNdr" words with no gender
# distinction — TODO confirm against the morphology tagset in use.
GENDERS = ("masc", "femn", "neut", "Ms-f", "GNdr")
def gram_label(token, value, stack):
	# True if the token carries the given grammeme.
    return value in token.grammemes
def gram_not_label(token, value, stack):
	# True if the token does NOT carry the given grammeme.
    return not value in token.grammemes
def gender_match_label(token, index, stack, genders=GENDERS):
    # Decide whether `token`'s gender agrees with the earlier matched token
    # at stack[index]. For each of the two tokens build a boolean vector over
    # `genders` in order: (masc, femn, neut, Ms-f, GNdr).
    results = ((g in t.grammemes for g in genders) for t in (stack[index], token))
    # Star-unpack splits each vector into the three plain genders plus the
    # two special flags; unpack order matches GENDERS order above.
    *case_token_genders, case_token_msf, case_token_gndr = next(results)
    *candidate_token_genders, candidate_token_msf, candidate_token_gndr = next(results)
    if not candidate_token_genders == case_token_genders:
        # (masc, femn, neut) flags differ; still accept when:
        if case_token_msf:
            # ...the reference token is common-gender and the candidate is
            # masculine or feminine (first two slots of the vector),
            if any(candidate_token_genders[:2]):
                return True
        elif case_token_gndr or candidate_token_gndr:
            # ...or either token carries no gender distinction at all.
            return True
    else:
        # Identical (masc, femn, neut) flags always agree.
        return True
    return False
def dictionary_label(token, values, stack):
    # True if any form of the token (presumably its normal forms — verify
    # against the tokenizer) is present in the given collection.
    return any((n in values) for n in token.forms)
# Dispatch table mapping rule label names to their implementations.
LABELS_LOOKUP_MAP = {
    "gram": gram_label,
    "gram-not": gram_not_label,
    "dictionary": dictionary_label,
    "gender-match": gender_match_label,
}
|
Check for `GNdr` grammeme in `gender-match` label
|
Check for `GNdr` grammeme in `gender-match` label
|
Python
|
mit
|
bureaucratic-labs/yargy
|
GENDERS = ("masc", "femn", "neut", "Ms-f")
def gram_label(token, value, stack):
return value in token.grammemes
def gram_not_label(token, value, stack):
return not value in token.grammemes
def gender_match_label(token, index, stack, genders=GENDERS):
results = ((g in t.grammemes for g in genders) for t in (stack[index], token))
*case_token_genders, case_token_msf = next(results)
*candidate_token_genders, candidate_token_msf = next(results)
if not candidate_token_genders == case_token_genders:
if case_token_msf:
if any(candidate_token_genders[:2]):
return True
else:
return True
return False
def dictionary_label(token, values, stack):
return any((n in values) for n in token.forms)
LABELS_LOOKUP_MAP = {
"gram": gram_label,
"gram-not": gram_not_label,
"dictionary": dictionary_label,
"gender-match": gender_match_label,
}
Check for `GNdr` grammeme in `gender-match` label
|
GENDERS = ("masc", "femn", "neut", "Ms-f", "GNdr")
def gram_label(token, value, stack):
return value in token.grammemes
def gram_not_label(token, value, stack):
return not value in token.grammemes
def gender_match_label(token, index, stack, genders=GENDERS):
results = ((g in t.grammemes for g in genders) for t in (stack[index], token))
*case_token_genders, case_token_msf, case_token_gndr = next(results)
*candidate_token_genders, candidate_token_msf, candidate_token_gndr = next(results)
if not candidate_token_genders == case_token_genders:
if case_token_msf:
if any(candidate_token_genders[:2]):
return True
elif case_token_gndr or candidate_token_gndr:
return True
else:
return True
return False
def dictionary_label(token, values, stack):
return any((n in values) for n in token.forms)
LABELS_LOOKUP_MAP = {
"gram": gram_label,
"gram-not": gram_not_label,
"dictionary": dictionary_label,
"gender-match": gender_match_label,
}
|
<commit_before>GENDERS = ("masc", "femn", "neut", "Ms-f")
def gram_label(token, value, stack):
return value in token.grammemes
def gram_not_label(token, value, stack):
return not value in token.grammemes
def gender_match_label(token, index, stack, genders=GENDERS):
results = ((g in t.grammemes for g in genders) for t in (stack[index], token))
*case_token_genders, case_token_msf = next(results)
*candidate_token_genders, candidate_token_msf = next(results)
if not candidate_token_genders == case_token_genders:
if case_token_msf:
if any(candidate_token_genders[:2]):
return True
else:
return True
return False
def dictionary_label(token, values, stack):
return any((n in values) for n in token.forms)
LABELS_LOOKUP_MAP = {
"gram": gram_label,
"gram-not": gram_not_label,
"dictionary": dictionary_label,
"gender-match": gender_match_label,
}
<commit_msg>Check for `GNdr` grammeme in `gender-match` label<commit_after>
|
GENDERS = ("masc", "femn", "neut", "Ms-f", "GNdr")
def gram_label(token, value, stack):
return value in token.grammemes
def gram_not_label(token, value, stack):
return not value in token.grammemes
def gender_match_label(token, index, stack, genders=GENDERS):
results = ((g in t.grammemes for g in genders) for t in (stack[index], token))
*case_token_genders, case_token_msf, case_token_gndr = next(results)
*candidate_token_genders, candidate_token_msf, candidate_token_gndr = next(results)
if not candidate_token_genders == case_token_genders:
if case_token_msf:
if any(candidate_token_genders[:2]):
return True
elif case_token_gndr or candidate_token_gndr:
return True
else:
return True
return False
def dictionary_label(token, values, stack):
return any((n in values) for n in token.forms)
LABELS_LOOKUP_MAP = {
"gram": gram_label,
"gram-not": gram_not_label,
"dictionary": dictionary_label,
"gender-match": gender_match_label,
}
|
GENDERS = ("masc", "femn", "neut", "Ms-f")
def gram_label(token, value, stack):
return value in token.grammemes
def gram_not_label(token, value, stack):
return not value in token.grammemes
def gender_match_label(token, index, stack, genders=GENDERS):
results = ((g in t.grammemes for g in genders) for t in (stack[index], token))
*case_token_genders, case_token_msf = next(results)
*candidate_token_genders, candidate_token_msf = next(results)
if not candidate_token_genders == case_token_genders:
if case_token_msf:
if any(candidate_token_genders[:2]):
return True
else:
return True
return False
def dictionary_label(token, values, stack):
return any((n in values) for n in token.forms)
LABELS_LOOKUP_MAP = {
"gram": gram_label,
"gram-not": gram_not_label,
"dictionary": dictionary_label,
"gender-match": gender_match_label,
}
Check for `GNdr` grammeme in `gender-match` labelGENDERS = ("masc", "femn", "neut", "Ms-f", "GNdr")
def gram_label(token, value, stack):
return value in token.grammemes
def gram_not_label(token, value, stack):
return not value in token.grammemes
def gender_match_label(token, index, stack, genders=GENDERS):
results = ((g in t.grammemes for g in genders) for t in (stack[index], token))
*case_token_genders, case_token_msf, case_token_gndr = next(results)
*candidate_token_genders, candidate_token_msf, candidate_token_gndr = next(results)
if not candidate_token_genders == case_token_genders:
if case_token_msf:
if any(candidate_token_genders[:2]):
return True
elif case_token_gndr or candidate_token_gndr:
return True
else:
return True
return False
def dictionary_label(token, values, stack):
return any((n in values) for n in token.forms)
LABELS_LOOKUP_MAP = {
"gram": gram_label,
"gram-not": gram_not_label,
"dictionary": dictionary_label,
"gender-match": gender_match_label,
}
|
<commit_before>GENDERS = ("masc", "femn", "neut", "Ms-f")
def gram_label(token, value, stack):
return value in token.grammemes
def gram_not_label(token, value, stack):
return not value in token.grammemes
def gender_match_label(token, index, stack, genders=GENDERS):
results = ((g in t.grammemes for g in genders) for t in (stack[index], token))
*case_token_genders, case_token_msf = next(results)
*candidate_token_genders, candidate_token_msf = next(results)
if not candidate_token_genders == case_token_genders:
if case_token_msf:
if any(candidate_token_genders[:2]):
return True
else:
return True
return False
def dictionary_label(token, values, stack):
return any((n in values) for n in token.forms)
LABELS_LOOKUP_MAP = {
"gram": gram_label,
"gram-not": gram_not_label,
"dictionary": dictionary_label,
"gender-match": gender_match_label,
}
<commit_msg>Check for `GNdr` grammeme in `gender-match` label<commit_after>GENDERS = ("masc", "femn", "neut", "Ms-f", "GNdr")
def gram_label(token, value, stack):
return value in token.grammemes
def gram_not_label(token, value, stack):
return not value in token.grammemes
def gender_match_label(token, index, stack, genders=GENDERS):
results = ((g in t.grammemes for g in genders) for t in (stack[index], token))
*case_token_genders, case_token_msf, case_token_gndr = next(results)
*candidate_token_genders, candidate_token_msf, candidate_token_gndr = next(results)
if not candidate_token_genders == case_token_genders:
if case_token_msf:
if any(candidate_token_genders[:2]):
return True
elif case_token_gndr or candidate_token_gndr:
return True
else:
return True
return False
def dictionary_label(token, values, stack):
return any((n in values) for n in token.forms)
LABELS_LOOKUP_MAP = {
"gram": gram_label,
"gram-not": gram_not_label,
"dictionary": dictionary_label,
"gender-match": gender_match_label,
}
|
a44ec4543fc6951cd45ba3c1696e428e36a9c161
|
commands/say.py
|
commands/say.py
|
from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, message):
"""
:type message: IrcMessage
"""
if message.messagePartsLength < 2:
message.bot.say(message.source, u"Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif not message.isPrivateMessage and message.messageParts[0] not in message.bot.channelsUserList:
message.bot.say(message.source, u"I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = u" ".join(message.messageParts[1:])
messageType = u'say'
if message.trigger == u'do':
messageType = u'action'
elif message.trigger == u'notice':
messageType = u'notice'
message.bot.sendMessage(message.messageParts[0], messageToSay, messageType)
|
from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
	# Admin-only command that relays text to a channel or user, either as a
	# normal message ('say'), a CTCP action ('do'), or a notice ('notice').
	# NOTE: this module is Python 2 (see the 'print' statement below).
	triggers = ['say', 'do', 'notice']
	helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
	adminOnly = True
	showInCommandList = False
	def execute(self, message):
		"""
		Send the rest of the message to the channel/user in the first argument.

		:type message: IrcMessage
		"""
		if message.messagePartsLength < 2:
			message.bot.say(message.source, u"Please provide both a channel or user name to say something to, and the text to say")
		#Check if we're in the channel we have to say something to
		elif not message.isPrivateMessage and message.messageParts[0] not in message.bot.channelsUserList:
			message.bot.say(message.source, u"I'm not in that channel, so I can't say anything in there, sorry.")
		#Nothing's stopping us now! Say it!
		else:
			messageToSay = u" ".join(message.messageParts[1:])
			# Map the trigger word to the bot's message type; 'say' is default.
			messageType = u'say'
			if message.trigger == u'do':
				messageType = u'action'
			elif message.trigger == u'notice':
				messageType = u'notice'
			target = message.messageParts[0]
			#Make absolutely sure the target isn't unicode, because Twisted doesn't like that
			# (on Python 2, .encode on a byte string may also implicitly
			# decode first, hence UnicodeDecodeError is caught too)
			try:
				target = target.encode('utf-8')
			except (UnicodeEncodeError, UnicodeDecodeError):
				print "[Say module] Unable to convert '{}' to a string".format(target)
			message.bot.sendMessage(target, messageToSay, messageType)
|
Make sure the target of Say isn't in Unicode, otherwise Twisted complains
|
Make sure the target of Say isn't in Unicode, otherwise Twisted complains
|
Python
|
mit
|
Didero/DideRobot
|
from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, message):
"""
:type message: IrcMessage
"""
if message.messagePartsLength < 2:
message.bot.say(message.source, u"Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif not message.isPrivateMessage and message.messageParts[0] not in message.bot.channelsUserList:
message.bot.say(message.source, u"I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = u" ".join(message.messageParts[1:])
messageType = u'say'
if message.trigger == u'do':
messageType = u'action'
elif message.trigger == u'notice':
messageType = u'notice'
message.bot.sendMessage(message.messageParts[0], messageToSay, messageType)
Make sure the target of Say isn't in Unicode, otherwise Twisted complains
|
from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, message):
"""
:type message: IrcMessage
"""
if message.messagePartsLength < 2:
message.bot.say(message.source, u"Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif not message.isPrivateMessage and message.messageParts[0] not in message.bot.channelsUserList:
message.bot.say(message.source, u"I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = u" ".join(message.messageParts[1:])
messageType = u'say'
if message.trigger == u'do':
messageType = u'action'
elif message.trigger == u'notice':
messageType = u'notice'
target = message.messageParts[0]
#Make absolutely sure the target isn't unicode, because Twisted doesn't like that
try:
target = target.encode('utf-8')
except (UnicodeEncodeError, UnicodeDecodeError):
print "[Say module] Unable to convert '{}' to a string".format(target)
message.bot.sendMessage(target, messageToSay, messageType)
|
<commit_before>from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, message):
"""
:type message: IrcMessage
"""
if message.messagePartsLength < 2:
message.bot.say(message.source, u"Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif not message.isPrivateMessage and message.messageParts[0] not in message.bot.channelsUserList:
message.bot.say(message.source, u"I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = u" ".join(message.messageParts[1:])
messageType = u'say'
if message.trigger == u'do':
messageType = u'action'
elif message.trigger == u'notice':
messageType = u'notice'
message.bot.sendMessage(message.messageParts[0], messageToSay, messageType)
<commit_msg>Make sure the target of Say isn't in Unicode, otherwise Twisted complains<commit_after>
|
from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, message):
"""
:type message: IrcMessage
"""
if message.messagePartsLength < 2:
message.bot.say(message.source, u"Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif not message.isPrivateMessage and message.messageParts[0] not in message.bot.channelsUserList:
message.bot.say(message.source, u"I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = u" ".join(message.messageParts[1:])
messageType = u'say'
if message.trigger == u'do':
messageType = u'action'
elif message.trigger == u'notice':
messageType = u'notice'
target = message.messageParts[0]
#Make absolutely sure the target isn't unicode, because Twisted doesn't like that
try:
target = target.encode('utf-8')
except (UnicodeEncodeError, UnicodeDecodeError):
print "[Say module] Unable to convert '{}' to a string".format(target)
message.bot.sendMessage(target, messageToSay, messageType)
|
from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, message):
"""
:type message: IrcMessage
"""
if message.messagePartsLength < 2:
message.bot.say(message.source, u"Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif not message.isPrivateMessage and message.messageParts[0] not in message.bot.channelsUserList:
message.bot.say(message.source, u"I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = u" ".join(message.messageParts[1:])
messageType = u'say'
if message.trigger == u'do':
messageType = u'action'
elif message.trigger == u'notice':
messageType = u'notice'
message.bot.sendMessage(message.messageParts[0], messageToSay, messageType)
Make sure the target of Say isn't in Unicode, otherwise Twisted complainsfrom CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, message):
"""
:type message: IrcMessage
"""
if message.messagePartsLength < 2:
message.bot.say(message.source, u"Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif not message.isPrivateMessage and message.messageParts[0] not in message.bot.channelsUserList:
message.bot.say(message.source, u"I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = u" ".join(message.messageParts[1:])
messageType = u'say'
if message.trigger == u'do':
messageType = u'action'
elif message.trigger == u'notice':
messageType = u'notice'
target = message.messageParts[0]
#Make absolutely sure the target isn't unicode, because Twisted doesn't like that
try:
target = target.encode('utf-8')
except (UnicodeEncodeError, UnicodeDecodeError):
print "[Say module] Unable to convert '{}' to a string".format(target)
message.bot.sendMessage(target, messageToSay, messageType)
|
<commit_before>from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, message):
"""
:type message: IrcMessage
"""
if message.messagePartsLength < 2:
message.bot.say(message.source, u"Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif not message.isPrivateMessage and message.messageParts[0] not in message.bot.channelsUserList:
message.bot.say(message.source, u"I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = u" ".join(message.messageParts[1:])
messageType = u'say'
if message.trigger == u'do':
messageType = u'action'
elif message.trigger == u'notice':
messageType = u'notice'
message.bot.sendMessage(message.messageParts[0], messageToSay, messageType)
<commit_msg>Make sure the target of Say isn't in Unicode, otherwise Twisted complains<commit_after>from CommandTemplate import CommandTemplate
from IrcMessage import IrcMessage
class Command(CommandTemplate):
triggers = ['say', 'do', 'notice']
helptext = "Makes the bot say the provided text in the provided channel (format 'say [channel/user] text')"
adminOnly = True
showInCommandList = False
def execute(self, message):
"""
:type message: IrcMessage
"""
if message.messagePartsLength < 2:
message.bot.say(message.source, u"Please provide both a channel or user name to say something to, and the text to say")
#Check if we're in the channel we have to say something to
elif not message.isPrivateMessage and message.messageParts[0] not in message.bot.channelsUserList:
message.bot.say(message.source, u"I'm not in that channel, so I can't say anything in there, sorry.")
#Nothing's stopping us now! Say it!
else:
messageToSay = u" ".join(message.messageParts[1:])
messageType = u'say'
if message.trigger == u'do':
messageType = u'action'
elif message.trigger == u'notice':
messageType = u'notice'
target = message.messageParts[0]
#Make absolutely sure the target isn't unicode, because Twisted doesn't like that
try:
target = target.encode('utf-8')
except (UnicodeEncodeError, UnicodeDecodeError):
print "[Say module] Unable to convert '{}' to a string".format(target)
message.bot.sendMessage(target, messageToSay, messageType)
|
42c2389c88fc52e186079df1c426af429537ed0e
|
io_EDM/__init__.py
|
io_EDM/__init__.py
|
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,0,1),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
pass
|
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,3,0),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
pass
|
Update blender plugin version to the next release number
|
Update blender plugin version to the next release number
|
Python
|
mit
|
ndevenish/Blender_ioEDM,ndevenish/Blender_ioEDM
|
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,0,1),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
passUpdate blender plugin version to the next release number
|
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,3,0),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
pass
|
<commit_before>
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,0,1),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
pass<commit_msg>Update blender plugin version to the next release number<commit_after>
|
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,3,0),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
pass
|
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,0,1),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
passUpdate blender plugin version to the next release number
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,3,0),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
pass
|
<commit_before>
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,0,1),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
pass<commit_msg>Update blender plugin version to the next release number<commit_after>
bl_info = {
'name': "Import: .EDM model files",
'description': "Importing of .EDM model files",
'author': "Nicholas Devenish",
'version': (0,3,0),
'blender': (2, 78, 0),
'location': "File > Import/Export > .EDM Files",
'category': 'Import-Export',
}
try:
import bpy
def register():
from .io_operators import register as importer_register
from .rna import register as rna_register
from .panels import register as panels_register
rna_register()
panels_register()
importer_register()
def unregister():
from .io_operators import unregister as importer_unregister
from .rna import unregister as rna_unregister
from .panels import unregister as panels_unregister
importer_unregister()
panels_unregister()
rna_unregister()
if __name__ == "__main__":
register()
except ImportError:
# Allow for now, as we might just want to import the sub-package
pass
|
621337bd685a200a37bcbbd5fe3441d2090aab54
|
cr8/__main__.py
|
cr8/__main__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argh
import argparse
from cr8 import __version__
from cr8.timeit import timeit
from cr8.insert_json import insert_json
from cr8.insert_fake_data import insert_fake_data
from cr8.insert_blob import insert_blob
from cr8.run_spec import run_spec
from cr8.run_crate import run_crate
from cr8.run_track import run_track
def main():
p = argh.ArghParser(
prog='cr8', formatter_class=argparse.RawTextHelpFormatter)
p.add_argument(
'--version', action='version', version="%(prog)s " + __version__)
p.add_commands([timeit,
insert_json,
insert_fake_data,
insert_blob,
run_spec,
run_crate,
run_track])
p.dispatch()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# PYTHON_ARGCOMPLETE_OK
import argh
import argparse
from cr8 import __version__
from cr8.timeit import timeit
from cr8.insert_json import insert_json
from cr8.insert_fake_data import insert_fake_data
from cr8.insert_blob import insert_blob
from cr8.run_spec import run_spec
from cr8.run_crate import run_crate
from cr8.run_track import run_track
def main():
p = argh.ArghParser(
prog='cr8', formatter_class=argparse.RawTextHelpFormatter)
p.add_argument(
'--version', action='version', version="%(prog)s " + __version__)
p.add_commands([timeit,
insert_json,
insert_fake_data,
insert_blob,
run_spec,
run_crate,
run_track])
p.dispatch()
if __name__ == '__main__':
main()
|
Add PYTHON_ARGCOMPLETE_OK to enable completion for argcomplete users
|
Add PYTHON_ARGCOMPLETE_OK to enable completion for argcomplete users
|
Python
|
mit
|
mikethebeer/cr8,mfussenegger/cr8
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argh
import argparse
from cr8 import __version__
from cr8.timeit import timeit
from cr8.insert_json import insert_json
from cr8.insert_fake_data import insert_fake_data
from cr8.insert_blob import insert_blob
from cr8.run_spec import run_spec
from cr8.run_crate import run_crate
from cr8.run_track import run_track
def main():
p = argh.ArghParser(
prog='cr8', formatter_class=argparse.RawTextHelpFormatter)
p.add_argument(
'--version', action='version', version="%(prog)s " + __version__)
p.add_commands([timeit,
insert_json,
insert_fake_data,
insert_blob,
run_spec,
run_crate,
run_track])
p.dispatch()
if __name__ == '__main__':
main()
Add PYTHON_ARGCOMPLETE_OK to enable completion for argcomplete users
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# PYTHON_ARGCOMPLETE_OK
import argh
import argparse
from cr8 import __version__
from cr8.timeit import timeit
from cr8.insert_json import insert_json
from cr8.insert_fake_data import insert_fake_data
from cr8.insert_blob import insert_blob
from cr8.run_spec import run_spec
from cr8.run_crate import run_crate
from cr8.run_track import run_track
def main():
p = argh.ArghParser(
prog='cr8', formatter_class=argparse.RawTextHelpFormatter)
p.add_argument(
'--version', action='version', version="%(prog)s " + __version__)
p.add_commands([timeit,
insert_json,
insert_fake_data,
insert_blob,
run_spec,
run_crate,
run_track])
p.dispatch()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argh
import argparse
from cr8 import __version__
from cr8.timeit import timeit
from cr8.insert_json import insert_json
from cr8.insert_fake_data import insert_fake_data
from cr8.insert_blob import insert_blob
from cr8.run_spec import run_spec
from cr8.run_crate import run_crate
from cr8.run_track import run_track
def main():
p = argh.ArghParser(
prog='cr8', formatter_class=argparse.RawTextHelpFormatter)
p.add_argument(
'--version', action='version', version="%(prog)s " + __version__)
p.add_commands([timeit,
insert_json,
insert_fake_data,
insert_blob,
run_spec,
run_crate,
run_track])
p.dispatch()
if __name__ == '__main__':
main()
<commit_msg>Add PYTHON_ARGCOMPLETE_OK to enable completion for argcomplete users<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# PYTHON_ARGCOMPLETE_OK
import argh
import argparse
from cr8 import __version__
from cr8.timeit import timeit
from cr8.insert_json import insert_json
from cr8.insert_fake_data import insert_fake_data
from cr8.insert_blob import insert_blob
from cr8.run_spec import run_spec
from cr8.run_crate import run_crate
from cr8.run_track import run_track
def main():
p = argh.ArghParser(
prog='cr8', formatter_class=argparse.RawTextHelpFormatter)
p.add_argument(
'--version', action='version', version="%(prog)s " + __version__)
p.add_commands([timeit,
insert_json,
insert_fake_data,
insert_blob,
run_spec,
run_crate,
run_track])
p.dispatch()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argh
import argparse
from cr8 import __version__
from cr8.timeit import timeit
from cr8.insert_json import insert_json
from cr8.insert_fake_data import insert_fake_data
from cr8.insert_blob import insert_blob
from cr8.run_spec import run_spec
from cr8.run_crate import run_crate
from cr8.run_track import run_track
def main():
p = argh.ArghParser(
prog='cr8', formatter_class=argparse.RawTextHelpFormatter)
p.add_argument(
'--version', action='version', version="%(prog)s " + __version__)
p.add_commands([timeit,
insert_json,
insert_fake_data,
insert_blob,
run_spec,
run_crate,
run_track])
p.dispatch()
if __name__ == '__main__':
main()
Add PYTHON_ARGCOMPLETE_OK to enable completion for argcomplete users#!/usr/bin/env python
# -*- coding: utf-8 -*-
# PYTHON_ARGCOMPLETE_OK
import argh
import argparse
from cr8 import __version__
from cr8.timeit import timeit
from cr8.insert_json import insert_json
from cr8.insert_fake_data import insert_fake_data
from cr8.insert_blob import insert_blob
from cr8.run_spec import run_spec
from cr8.run_crate import run_crate
from cr8.run_track import run_track
def main():
p = argh.ArghParser(
prog='cr8', formatter_class=argparse.RawTextHelpFormatter)
p.add_argument(
'--version', action='version', version="%(prog)s " + __version__)
p.add_commands([timeit,
insert_json,
insert_fake_data,
insert_blob,
run_spec,
run_crate,
run_track])
p.dispatch()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argh
import argparse
from cr8 import __version__
from cr8.timeit import timeit
from cr8.insert_json import insert_json
from cr8.insert_fake_data import insert_fake_data
from cr8.insert_blob import insert_blob
from cr8.run_spec import run_spec
from cr8.run_crate import run_crate
from cr8.run_track import run_track
def main():
p = argh.ArghParser(
prog='cr8', formatter_class=argparse.RawTextHelpFormatter)
p.add_argument(
'--version', action='version', version="%(prog)s " + __version__)
p.add_commands([timeit,
insert_json,
insert_fake_data,
insert_blob,
run_spec,
run_crate,
run_track])
p.dispatch()
if __name__ == '__main__':
main()
<commit_msg>Add PYTHON_ARGCOMPLETE_OK to enable completion for argcomplete users<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# PYTHON_ARGCOMPLETE_OK
import argh
import argparse
from cr8 import __version__
from cr8.timeit import timeit
from cr8.insert_json import insert_json
from cr8.insert_fake_data import insert_fake_data
from cr8.insert_blob import insert_blob
from cr8.run_spec import run_spec
from cr8.run_crate import run_crate
from cr8.run_track import run_track
def main():
p = argh.ArghParser(
prog='cr8', formatter_class=argparse.RawTextHelpFormatter)
p.add_argument(
'--version', action='version', version="%(prog)s " + __version__)
p.add_commands([timeit,
insert_json,
insert_fake_data,
insert_blob,
run_spec,
run_crate,
run_track])
p.dispatch()
if __name__ == '__main__':
main()
|
c2bca21718295b6400471395f5da3ca9d42e8a84
|
modoboa_dmarc/tests/mixins.py
|
modoboa_dmarc/tests/mixins.py
|
"""Test mixins."""
import os
import sys
import six
from django.core.management import call_command
class CallCommandMixin(object):
"""A mixin to provide command execution shortcuts."""
def setUp(self):
"""Replace stdin"""
super(CallCommandMixin, self).setUp()
self.stdin = sys.stdin
def tearDown(self):
sys.stdin = self.stdin
def import_report(self, path):
"""Import test report from file."""
with open(path) as fp:
buf = six.StringIO(fp.read())
sys.stdin = buf
call_command("import_aggregated_report", "--pipe")
def import_reports(self, folder="reports"):
"""Import reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
def import_fail_reports(self, folder="fail-reports"):
"""Import failed reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
# TODO check return code different from 0
|
"""Test mixins."""
import os
import sys
import six
from django.core.management import call_command
from django.utils.six import StringIO
class CallCommandMixin(object):
"""A mixin to provide command execution shortcuts."""
def setUp(self):
"""Replace stdin"""
super(CallCommandMixin, self).setUp()
self.stdin = sys.stdin
def tearDown(self):
sys.stdin = self.stdin
def import_report(self, path):
"""Import test report from file."""
with open(path) as fp:
buf = six.StringIO(fp.read())
sys.stdin = buf
out = StringIO()
call_command("import_aggregated_report", "--pipe", stdout=out)
return out.getvalue()
def import_reports(self, folder="reports"):
"""Import reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
def import_fail_reports(self, folder="fail-reports"):
"""Import failed reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
ret = self.import_report(fpath)
self.assertNotIn('ERROR-PARSING', ret)
|
Check if error available on output
|
Check if error available on output
|
Python
|
mit
|
modoboa/modoboa-dmarc,modoboa/modoboa-dmarc
|
"""Test mixins."""
import os
import sys
import six
from django.core.management import call_command
class CallCommandMixin(object):
"""A mixin to provide command execution shortcuts."""
def setUp(self):
"""Replace stdin"""
super(CallCommandMixin, self).setUp()
self.stdin = sys.stdin
def tearDown(self):
sys.stdin = self.stdin
def import_report(self, path):
"""Import test report from file."""
with open(path) as fp:
buf = six.StringIO(fp.read())
sys.stdin = buf
call_command("import_aggregated_report", "--pipe")
def import_reports(self, folder="reports"):
"""Import reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
def import_fail_reports(self, folder="fail-reports"):
"""Import failed reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
# TODO check return code different from 0
Check if error available on output
|
"""Test mixins."""
import os
import sys
import six
from django.core.management import call_command
from django.utils.six import StringIO
class CallCommandMixin(object):
"""A mixin to provide command execution shortcuts."""
def setUp(self):
"""Replace stdin"""
super(CallCommandMixin, self).setUp()
self.stdin = sys.stdin
def tearDown(self):
sys.stdin = self.stdin
def import_report(self, path):
"""Import test report from file."""
with open(path) as fp:
buf = six.StringIO(fp.read())
sys.stdin = buf
out = StringIO()
call_command("import_aggregated_report", "--pipe", stdout=out)
return out.getvalue()
def import_reports(self, folder="reports"):
"""Import reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
def import_fail_reports(self, folder="fail-reports"):
"""Import failed reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
ret = self.import_report(fpath)
self.assertNotIn('ERROR-PARSING', ret)
|
<commit_before>"""Test mixins."""
import os
import sys
import six
from django.core.management import call_command
class CallCommandMixin(object):
"""A mixin to provide command execution shortcuts."""
def setUp(self):
"""Replace stdin"""
super(CallCommandMixin, self).setUp()
self.stdin = sys.stdin
def tearDown(self):
sys.stdin = self.stdin
def import_report(self, path):
"""Import test report from file."""
with open(path) as fp:
buf = six.StringIO(fp.read())
sys.stdin = buf
call_command("import_aggregated_report", "--pipe")
def import_reports(self, folder="reports"):
"""Import reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
def import_fail_reports(self, folder="fail-reports"):
"""Import failed reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
# TODO check return code different from 0
<commit_msg>Check if error available on output<commit_after>
|
"""Test mixins."""
import os
import sys
import six
from django.core.management import call_command
from django.utils.six import StringIO
class CallCommandMixin(object):
"""A mixin to provide command execution shortcuts."""
def setUp(self):
"""Replace stdin"""
super(CallCommandMixin, self).setUp()
self.stdin = sys.stdin
def tearDown(self):
sys.stdin = self.stdin
def import_report(self, path):
"""Import test report from file."""
with open(path) as fp:
buf = six.StringIO(fp.read())
sys.stdin = buf
out = StringIO()
call_command("import_aggregated_report", "--pipe", stdout=out)
return out.getvalue()
def import_reports(self, folder="reports"):
"""Import reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
def import_fail_reports(self, folder="fail-reports"):
"""Import failed reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
ret = self.import_report(fpath)
self.assertNotIn('ERROR-PARSING', ret)
|
"""Test mixins."""
import os
import sys
import six
from django.core.management import call_command
class CallCommandMixin(object):
"""A mixin to provide command execution shortcuts."""
def setUp(self):
"""Replace stdin"""
super(CallCommandMixin, self).setUp()
self.stdin = sys.stdin
def tearDown(self):
sys.stdin = self.stdin
def import_report(self, path):
"""Import test report from file."""
with open(path) as fp:
buf = six.StringIO(fp.read())
sys.stdin = buf
call_command("import_aggregated_report", "--pipe")
def import_reports(self, folder="reports"):
"""Import reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
def import_fail_reports(self, folder="fail-reports"):
"""Import failed reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
# TODO check return code different from 0
Check if error available on output"""Test mixins."""
import os
import sys
import six
from django.core.management import call_command
from django.utils.six import StringIO
class CallCommandMixin(object):
"""A mixin to provide command execution shortcuts."""
def setUp(self):
"""Replace stdin"""
super(CallCommandMixin, self).setUp()
self.stdin = sys.stdin
def tearDown(self):
sys.stdin = self.stdin
def import_report(self, path):
"""Import test report from file."""
with open(path) as fp:
buf = six.StringIO(fp.read())
sys.stdin = buf
out = StringIO()
call_command("import_aggregated_report", "--pipe", stdout=out)
return out.getvalue()
def import_reports(self, folder="reports"):
"""Import reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
def import_fail_reports(self, folder="fail-reports"):
"""Import failed reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
ret = self.import_report(fpath)
self.assertNotIn('ERROR-PARSING', ret)
|
<commit_before>"""Test mixins."""
import os
import sys
import six
from django.core.management import call_command
class CallCommandMixin(object):
"""A mixin to provide command execution shortcuts."""
def setUp(self):
"""Replace stdin"""
super(CallCommandMixin, self).setUp()
self.stdin = sys.stdin
def tearDown(self):
sys.stdin = self.stdin
def import_report(self, path):
"""Import test report from file."""
with open(path) as fp:
buf = six.StringIO(fp.read())
sys.stdin = buf
call_command("import_aggregated_report", "--pipe")
def import_reports(self, folder="reports"):
"""Import reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
def import_fail_reports(self, folder="fail-reports"):
"""Import failed reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
# TODO check return code different from 0
<commit_msg>Check if error available on output<commit_after>"""Test mixins."""
import os
import sys
import six
from django.core.management import call_command
from django.utils.six import StringIO
class CallCommandMixin(object):
"""A mixin to provide command execution shortcuts."""
def setUp(self):
"""Replace stdin"""
super(CallCommandMixin, self).setUp()
self.stdin = sys.stdin
def tearDown(self):
sys.stdin = self.stdin
def import_report(self, path):
"""Import test report from file."""
with open(path) as fp:
buf = six.StringIO(fp.read())
sys.stdin = buf
out = StringIO()
call_command("import_aggregated_report", "--pipe", stdout=out)
return out.getvalue()
def import_reports(self, folder="reports"):
"""Import reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
self.import_report(fpath)
def import_fail_reports(self, folder="fail-reports"):
"""Import failed reports from folder."""
path = os.path.join(os.path.dirname(__file__), folder)
for f in os.listdir(path):
fpath = os.path.join(path, f)
if f.startswith(".") or not os.path.isfile(fpath):
continue
ret = self.import_report(fpath)
self.assertNotIn('ERROR-PARSING', ret)
|
98552a4cb683e25ec9af53024e58644c04b55872
|
molly/external_media/views.py
|
molly/external_media/views.py
|
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
response = HttpResponse(open(eis.get_filename(), 'rb').read(), mimetype=eis.content_type.encode('ascii'))
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
try:
response = HttpResponse(open(eis.get_filename(), 'rb').read(),
mimetype=eis.content_type.encode('ascii'))
except IOError:
eis.delete()
raise Http404()
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
Handle missing external files gracefully
|
MOX-182: Handle missing external files gracefully
|
Python
|
apache-2.0
|
mollyproject/mollyproject,mollyproject/mollyproject,mollyproject/mollyproject
|
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
response = HttpResponse(open(eis.get_filename(), 'rb').read(), mimetype=eis.content_type.encode('ascii'))
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
MOX-182: Handle missing external files gracefully
|
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
try:
response = HttpResponse(open(eis.get_filename(), 'rb').read(),
mimetype=eis.content_type.encode('ascii'))
except IOError:
eis.delete()
raise Http404()
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
<commit_before>from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
response = HttpResponse(open(eis.get_filename(), 'rb').read(), mimetype=eis.content_type.encode('ascii'))
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
<commit_msg>MOX-182: Handle missing external files gracefully<commit_after>
|
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
try:
response = HttpResponse(open(eis.get_filename(), 'rb').read(),
mimetype=eis.content_type.encode('ascii'))
except IOError:
eis.delete()
raise Http404()
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
response = HttpResponse(open(eis.get_filename(), 'rb').read(), mimetype=eis.content_type.encode('ascii'))
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
MOX-182: Handle missing external files gracefullyfrom email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
try:
response = HttpResponse(open(eis.get_filename(), 'rb').read(),
mimetype=eis.content_type.encode('ascii'))
except IOError:
eis.delete()
raise Http404()
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
<commit_before>from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
response = HttpResponse(open(eis.get_filename(), 'rb').read(), mimetype=eis.content_type.encode('ascii'))
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
<commit_msg>MOX-182: Handle missing external files gracefully<commit_after>from email.utils import formatdate
from datetime import datetime, timedelta
from time import mktime
from django.shortcuts import get_object_or_404
from django.http import HttpResponse, Http404
from molly.utils.views import BaseView
from molly.utils.breadcrumbs import NullBreadcrumb
from models import ExternalImageSized
class IndexView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context):
raise Http404
class ExternalImageView(BaseView):
breadcrumb = NullBreadcrumb
def handle_GET(self, request, context, slug):
eis = get_object_or_404(ExternalImageSized, slug=slug)
try:
response = HttpResponse(open(eis.get_filename(), 'rb').read(),
mimetype=eis.content_type.encode('ascii'))
except IOError:
eis.delete()
raise Http404()
response['ETag'] = slug
response['Expires'] = formatdate(mktime((datetime.now() + timedelta(days=7)).timetuple()))
response['Last-Modified'] = formatdate(mktime(eis.external_image.last_updated.timetuple()))
return response
|
78f96421598a561285b9cd5568fd4acabd52585f
|
offenerhaushalt/generators.py
|
offenerhaushalt/generators.py
|
from offenerhaushalt.core import freezer, pages, sites
@freezer.register_generator
def page():
for page in pages:
yield {'path': page.path}
@freezer.register_generator
def site():
for site in sites:
yield {'slug': site.slug}
|
from offenerhaushalt.core import freezer, pages, sites
@freezer.register_generator
def page():
for page in pages:
yield {'path': page.path}
@freezer.register_generator
def site():
for site in sites:
yield {'slug': site.slug}
@freezer.register_generator
def embed_site():
for site in sites:
yield {'slug': site.slug}
|
Add embed site freeze generator
|
Add embed site freeze generator
Fix tabs/spaces issue as well
|
Python
|
mit
|
Opendatal/offenerhaushalt.de,Opendatal/offenerhaushalt.de,Opendatal/offenerhaushalt.de
|
from offenerhaushalt.core import freezer, pages, sites
@freezer.register_generator
def page():
for page in pages:
yield {'path': page.path}
@freezer.register_generator
def site():
for site in sites:
yield {'slug': site.slug}Add embed site freeze generator
Fix tabs/spaces issue as well
|
from offenerhaushalt.core import freezer, pages, sites
@freezer.register_generator
def page():
for page in pages:
yield {'path': page.path}
@freezer.register_generator
def site():
for site in sites:
yield {'slug': site.slug}
@freezer.register_generator
def embed_site():
for site in sites:
yield {'slug': site.slug}
|
<commit_before>
from offenerhaushalt.core import freezer, pages, sites
@freezer.register_generator
def page():
for page in pages:
yield {'path': page.path}
@freezer.register_generator
def site():
for site in sites:
yield {'slug': site.slug}<commit_msg>Add embed site freeze generator
Fix tabs/spaces issue as well<commit_after>
|
from offenerhaushalt.core import freezer, pages, sites
@freezer.register_generator
def page():
for page in pages:
yield {'path': page.path}
@freezer.register_generator
def site():
for site in sites:
yield {'slug': site.slug}
@freezer.register_generator
def embed_site():
for site in sites:
yield {'slug': site.slug}
|
from offenerhaushalt.core import freezer, pages, sites
@freezer.register_generator
def page():
for page in pages:
yield {'path': page.path}
@freezer.register_generator
def site():
for site in sites:
yield {'slug': site.slug}Add embed site freeze generator
Fix tabs/spaces issue as wellfrom offenerhaushalt.core import freezer, pages, sites
@freezer.register_generator
def page():
for page in pages:
yield {'path': page.path}
@freezer.register_generator
def site():
for site in sites:
yield {'slug': site.slug}
@freezer.register_generator
def embed_site():
for site in sites:
yield {'slug': site.slug}
|
<commit_before>
from offenerhaushalt.core import freezer, pages, sites
@freezer.register_generator
def page():
for page in pages:
yield {'path': page.path}
@freezer.register_generator
def site():
for site in sites:
yield {'slug': site.slug}<commit_msg>Add embed site freeze generator
Fix tabs/spaces issue as well<commit_after>from offenerhaushalt.core import freezer, pages, sites
@freezer.register_generator
def page():
for page in pages:
yield {'path': page.path}
@freezer.register_generator
def site():
for site in sites:
yield {'slug': site.slug}
@freezer.register_generator
def embed_site():
for site in sites:
yield {'slug': site.slug}
|
9fda25c0a28f7965c2378dcd4b2106ca034052c3
|
plumeria/plugins/have_i_been_pwned.py
|
plumeria/plugins/have_i_been_pwned.py
|
import plumeria.util.http as http
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("haveibeenpwned", "pwned", category="Search", params=[Text('query')])
@rate_limit(burst_size=4)
async def have_i_been_pwned(message, query):
"""
Checks where an account (specified by account name or email address) exists
on sites that have experienced data breaches.
Example::
pwned email@example.com
"""
r = await http.get("https://haveibeenpwned.com/api/v2/breachedaccount/" + query, headers=[
('User-Agent', 'Plumeria chat bot (+https://gitlab.com/sk89q/Plumeria)')
])
if not len(r.text().strip()):
raise CommandError("Account not found! (That's good.)")
results = SafeStructure(r.json())
return build_mapping(
[(e.Title, "{} ({} breached) ({})".format(e.BreachDate, e.PwnCount, ", ".join(e.DataClasses))) for e in
results])
|
import plumeria.util.http as http
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("haveibeenpwned", "pwned", category="Search", params=[Text('query')])
@rate_limit(burst_size=4)
async def have_i_been_pwned(message, query):
"""
Checks where an account (specified by account name or email address) exists
on sites that have experienced data breaches.
Example::
pwned email@example.com
"""
try:
r = await http.get("https://haveibeenpwned.com/api/v2/breachedaccount/" + query, headers=[
('User-Agent', 'Plumeria chat bot (+https://gitlab.com/sk89q/Plumeria)')
])
except http.BadStatusCodeError as e:
if e.http_code == 404:
raise CommandError("Account not found! (That's good.)")
else:
raise e
results = SafeStructure(r.json())
return build_mapping(
[(e.Title, "{} ({} breached) ({})".format(e.BreachDate, e.PwnCount, ", ".join(e.DataClasses))) for e in
results])
|
Handle missing accounts on HaveIBeenPwned properly.
|
Handle missing accounts on HaveIBeenPwned properly.
|
Python
|
mit
|
sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria
|
import plumeria.util.http as http
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("haveibeenpwned", "pwned", category="Search", params=[Text('query')])
@rate_limit(burst_size=4)
async def have_i_been_pwned(message, query):
"""
Checks where an account (specified by account name or email address) exists
on sites that have experienced data breaches.
Example::
pwned email@example.com
"""
r = await http.get("https://haveibeenpwned.com/api/v2/breachedaccount/" + query, headers=[
('User-Agent', 'Plumeria chat bot (+https://gitlab.com/sk89q/Plumeria)')
])
if not len(r.text().strip()):
raise CommandError("Account not found! (That's good.)")
results = SafeStructure(r.json())
return build_mapping(
[(e.Title, "{} ({} breached) ({})".format(e.BreachDate, e.PwnCount, ", ".join(e.DataClasses))) for e in
results])
Handle missing accounts on HaveIBeenPwned properly.
|
import plumeria.util.http as http
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("haveibeenpwned", "pwned", category="Search", params=[Text('query')])
@rate_limit(burst_size=4)
async def have_i_been_pwned(message, query):
"""
Checks where an account (specified by account name or email address) exists
on sites that have experienced data breaches.
Example::
pwned email@example.com
"""
try:
r = await http.get("https://haveibeenpwned.com/api/v2/breachedaccount/" + query, headers=[
('User-Agent', 'Plumeria chat bot (+https://gitlab.com/sk89q/Plumeria)')
])
except http.BadStatusCodeError as e:
if e.http_code == 404:
raise CommandError("Account not found! (That's good.)")
else:
raise e
results = SafeStructure(r.json())
return build_mapping(
[(e.Title, "{} ({} breached) ({})".format(e.BreachDate, e.PwnCount, ", ".join(e.DataClasses))) for e in
results])
|
<commit_before>import plumeria.util.http as http
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("haveibeenpwned", "pwned", category="Search", params=[Text('query')])
@rate_limit(burst_size=4)
async def have_i_been_pwned(message, query):
"""
Checks where an account (specified by account name or email address) exists
on sites that have experienced data breaches.
Example::
pwned email@example.com
"""
r = await http.get("https://haveibeenpwned.com/api/v2/breachedaccount/" + query, headers=[
('User-Agent', 'Plumeria chat bot (+https://gitlab.com/sk89q/Plumeria)')
])
if not len(r.text().strip()):
raise CommandError("Account not found! (That's good.)")
results = SafeStructure(r.json())
return build_mapping(
[(e.Title, "{} ({} breached) ({})".format(e.BreachDate, e.PwnCount, ", ".join(e.DataClasses))) for e in
results])
<commit_msg>Handle missing accounts on HaveIBeenPwned properly.<commit_after>
|
import plumeria.util.http as http
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("haveibeenpwned", "pwned", category="Search", params=[Text('query')])
@rate_limit(burst_size=4)
async def have_i_been_pwned(message, query):
"""
Checks where an account (specified by account name or email address) exists
on sites that have experienced data breaches.
Example::
pwned email@example.com
"""
try:
r = await http.get("https://haveibeenpwned.com/api/v2/breachedaccount/" + query, headers=[
('User-Agent', 'Plumeria chat bot (+https://gitlab.com/sk89q/Plumeria)')
])
except http.BadStatusCodeError as e:
if e.http_code == 404:
raise CommandError("Account not found! (That's good.)")
else:
raise e
results = SafeStructure(r.json())
return build_mapping(
[(e.Title, "{} ({} breached) ({})".format(e.BreachDate, e.PwnCount, ", ".join(e.DataClasses))) for e in
results])
|
import plumeria.util.http as http
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("haveibeenpwned", "pwned", category="Search", params=[Text('query')])
@rate_limit(burst_size=4)
async def have_i_been_pwned(message, query):
"""
Checks where an account (specified by account name or email address) exists
on sites that have experienced data breaches.
Example::
pwned email@example.com
"""
r = await http.get("https://haveibeenpwned.com/api/v2/breachedaccount/" + query, headers=[
('User-Agent', 'Plumeria chat bot (+https://gitlab.com/sk89q/Plumeria)')
])
if not len(r.text().strip()):
raise CommandError("Account not found! (That's good.)")
results = SafeStructure(r.json())
return build_mapping(
[(e.Title, "{} ({} breached) ({})".format(e.BreachDate, e.PwnCount, ", ".join(e.DataClasses))) for e in
results])
Handle missing accounts on HaveIBeenPwned properly.import plumeria.util.http as http
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("haveibeenpwned", "pwned", category="Search", params=[Text('query')])
@rate_limit(burst_size=4)
async def have_i_been_pwned(message, query):
"""
Checks where an account (specified by account name or email address) exists
on sites that have experienced data breaches.
Example::
pwned email@example.com
"""
try:
r = await http.get("https://haveibeenpwned.com/api/v2/breachedaccount/" + query, headers=[
('User-Agent', 'Plumeria chat bot (+https://gitlab.com/sk89q/Plumeria)')
])
except http.BadStatusCodeError as e:
if e.http_code == 404:
raise CommandError("Account not found! (That's good.)")
else:
raise e
results = SafeStructure(r.json())
return build_mapping(
[(e.Title, "{} ({} breached) ({})".format(e.BreachDate, e.PwnCount, ", ".join(e.DataClasses))) for e in
results])
|
<commit_before>import plumeria.util.http as http
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("haveibeenpwned", "pwned", category="Search", params=[Text('query')])
@rate_limit(burst_size=4)
async def have_i_been_pwned(message, query):
"""
Checks where an account (specified by account name or email address) exists
on sites that have experienced data breaches.
Example::
pwned email@example.com
"""
r = await http.get("https://haveibeenpwned.com/api/v2/breachedaccount/" + query, headers=[
('User-Agent', 'Plumeria chat bot (+https://gitlab.com/sk89q/Plumeria)')
])
if not len(r.text().strip()):
raise CommandError("Account not found! (That's good.)")
results = SafeStructure(r.json())
return build_mapping(
[(e.Title, "{} ({} breached) ({})".format(e.BreachDate, e.PwnCount, ", ".join(e.DataClasses))) for e in
results])
<commit_msg>Handle missing accounts on HaveIBeenPwned properly.<commit_after>import plumeria.util.http as http
from plumeria import config
from plumeria.command import commands, CommandError
from plumeria.command.parse import Text
from plumeria.message.mappings import build_mapping
from plumeria.util.collections import SafeStructure
from plumeria.util.ratelimit import rate_limit
@commands.register("haveibeenpwned", "pwned", category="Search", params=[Text('query')])
@rate_limit(burst_size=4)
async def have_i_been_pwned(message, query):
"""
Checks where an account (specified by account name or email address) exists
on sites that have experienced data breaches.
Example::
pwned email@example.com
"""
try:
r = await http.get("https://haveibeenpwned.com/api/v2/breachedaccount/" + query, headers=[
('User-Agent', 'Plumeria chat bot (+https://gitlab.com/sk89q/Plumeria)')
])
except http.BadStatusCodeError as e:
if e.http_code == 404:
raise CommandError("Account not found! (That's good.)")
else:
raise e
results = SafeStructure(r.json())
return build_mapping(
[(e.Title, "{} ({} breached) ({})".format(e.BreachDate, e.PwnCount, ", ".join(e.DataClasses))) for e in
results])
|
cc51137aedeee8bdcf6b47e98b195ec750183ab4
|
context_variables/__init__.py
|
context_variables/__init__.py
|
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
|
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# If we got a plain value, return that
if not callable(self.func):
return self.func
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
|
Allow plain values, not just methods
|
Allow plain values, not just methods
|
Python
|
mit
|
carlmjohnson/django-context-variables
|
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
Allow plain values, not just methods
|
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# If we got a plain value, return that
if not callable(self.func):
return self.func
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
|
<commit_before>class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
<commit_msg>Allow plain values, not just methods<commit_after>
|
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# If we got a plain value, return that
if not callable(self.func):
return self.func
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
|
class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
Allow plain values, not just methodsclass context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# If we got a plain value, return that
if not callable(self.func):
return self.func
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
context = {}
for attr in dir(obj.__class__):
# Don't bother to check _private/__special attributes
if attr.startswith('_'):
continue
# Get attributes off the class, in case they've already been
# cached as their final values in the instance dictionary and to
# avoid general descriptor weirdness
raw_attr = getattr(obj.__class__, attr)
if isinstance(raw_attr, context_variable):
# Force evaluation of obj.`attr`
context[attr] = getattr(obj, attr)
return context
|
<commit_before>class context_variable(object):
def __init__(self, func):
self.func = func
self.__doc__ = func.__doc__
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
    """Return a dict of every public context_variable attribute of *obj*,
    mapping attribute name to its evaluated value."""
    context = {}
    for attr in dir(obj.__class__):
        # Don't bother to check _private/__special attributes
        if attr.startswith('_'):
            continue
        # Get attributes off the class, in case they've already been
        # cached as their final values in the instance dictionary and to
        # avoid general descriptor weirdness
        raw_attr = getattr(obj.__class__, attr)
        if isinstance(raw_attr, context_variable):
            # Force evaluation of obj.`attr` via the descriptor protocol
            context[attr] = getattr(obj, attr)
    return context
<commit_msg>Allow plain values, not just methods<commit_after>class context_variable(object):
    def __init__(self, func):
        """Wrap *func*: a one-argument callable, or a plain precomputed value."""
        self.func = func
        self.__doc__ = func.__doc__  # surface the wrapped object's docstring
def __get__(self, obj, objtype=None):
# Handle case of being called from class instead of an instance
if obj is None:
return self
# If we got a plain value, return that
if not callable(self.func):
return self.func
# Evaluate the property
value = self.func(obj)
# Save value into the instance, replacing the descriptor
object.__setattr__(obj, self.func.__name__, value)
return value
def get_context_variables(obj):
    """Evaluate every public ``context_variable`` attribute of *obj*.

    Returns a dict mapping attribute name -> evaluated value.
    """
    cls = obj.__class__
    # Look the attribute up on the class (not the instance) so we still see
    # the raw descriptor even after a previous access cached the value in
    # the instance dictionary.
    return {
        name: getattr(obj, name)  # triggers descriptor evaluation
        for name in dir(cls)
        if not name.startswith('_')
        and isinstance(getattr(cls, name), context_variable)
    }
|
96db4f0f42058ba9a8917fd4e9a3d8174f91cbd3
|
version_st2.py
|
version_st2.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.6dev'
|
# Copyright 2016 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.6dev'
|
Update licensing info on version file
|
Update licensing info on version file
|
Python
|
apache-2.0
|
StackStorm/mistral,StackStorm/mistral
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.6dev'
Update licensing info on version file
|
# Copyright 2016 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.6dev'
|
<commit_before>
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.6dev'
<commit_msg>Update licensing info on version file<commit_after>
|
# Copyright 2016 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.6dev'
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.6dev'
Update licensing info on version file
# Copyright 2016 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.6dev'
|
<commit_before>
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.6dev'
<commit_msg>Update licensing info on version file<commit_after># Copyright 2016 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = '1.6dev'
|
9653f3d4d3bd859d592542fc011ad7b81a866052
|
IPython/html/widgets/__init__.py
|
IPython/html/widgets/__init__.py
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from IPython.utils.warn import warn
warn("""The widget API is still considered experimental and
may change by the next major release of IPython.""")
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("""The widget API is still considered experimental and may change in the future.""", FutureWarning, stacklevel=2)
|
Make the widget experimental error a real python warning
|
Make the widget experimental error a real python warning
This means it can easily be turned off too.
|
Python
|
bsd-3-clause
|
jupyter-widgets/ipywidgets,cornhundred/ipywidgets,cornhundred/ipywidgets,SylvainCorlay/ipywidgets,cornhundred/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,cornhundred/ipywidgets,cornhundred/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,ipython/ipywidgets
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from IPython.utils.warn import warn
warn("""The widget API is still considered experimental and
may change by the next major release of IPython.""")
Make the widget experimental error a real python warning
This means it can easily be turned off too.
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("""The widget API is still considered experimental and may change in the future.""", FutureWarning, stacklevel=2)
|
<commit_before>from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from IPython.utils.warn import warn
warn("""The widget API is still considered experimental and
may change by the next major release of IPython.""")
<commit_msg>Make the widget experimental error a real python warning
This means it can easily be turned off too.<commit_after>
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("""The widget API is still considered experimental and may change in the future.""", FutureWarning, stacklevel=2)
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from IPython.utils.warn import warn
warn("""The widget API is still considered experimental and
may change by the next major release of IPython.""")
Make the widget experimental error a real python warning
This means it can easily be turned off too.
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("""The widget API is still considered experimental and may change in the future.""", FutureWarning, stacklevel=2)
|
<commit_before>from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from IPython.utils.warn import warn
warn("""The widget API is still considered experimental and
may change by the next major release of IPython.""")
<commit_msg>Make the widget experimental error a real python warning
This means it can easily be turned off too.<commit_after>from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("""The widget API is still considered experimental and may change in the future.""", FutureWarning, stacklevel=2)
|
c7e55bfd8284c4bb6755abc51dd7c940bca9d81a
|
sensor_consumers/dust_node.py
|
sensor_consumers/dust_node.py
|
# coding=utf-8
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
    """Consume dust-node readings from the "dust-node-pubsub" channel and
    store them in InfluxDB."""
    def __init__(self):
        # "indoor_air_quality" presumably selects the destination
        # database/series group - confirm in SensorConsumerBase.
        SensorConsumerBase.__init__(self, "indoor_air_quality")
    def run(self):
        # Blocks, invoking pubsub_callback for every published message.
        self.subscribe("dust-node-pubsub", self.pubsub_callback)
    def pubsub_callback(self, data):
        """Convert one pubsub payload into an InfluxDB point and store it."""
        if "action" in data:
            # Messages carrying an "action" key are control traffic, not readings.
            return
        influx_data = {
            "measurement": "dustnode",
            # Assumes data["utctimestamp"] is a naive UTC datetime, hence the
            # explicit "Z" suffix - TODO confirm at the publisher.
            "timestamp": data["utctimestamp"].isoformat() + "Z",
            "fields": {
                "room_humidity": data["data"]["room_humidity"],
                "room_temperature": round(data["data"]["room_temperature"], 1),
                "barometer_temperature": round(data["data"]["barometer_temperature"], 1),
                "barometer_pressure": round(data["data"]["barometer_reading"], 1),
                "dust_density": round(data["data"]["dust_density"], 5)
            }
        }
        self.insert_into_influx([influx_data])
def main():
    """Entry point: build the consumer and run it; exit status 0 on return."""
    DustNode().run()
    return 0

if __name__ == '__main__':
    sys.exit(main())
|
# coding=utf-8
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
    """Consume dust-node readings from the "dust-node-pubsub" channel and
    store them in InfluxDB."""
    def __init__(self):
        # "indoor_air_quality" presumably selects the destination
        # database/series group - confirm in SensorConsumerBase.
        SensorConsumerBase.__init__(self, "indoor_air_quality")
    def run(self):
        # Blocks, invoking pubsub_callback for every published message.
        self.subscribe("dust-node-pubsub", self.pubsub_callback)
    def pubsub_callback(self, data):
        """Convert one pubsub payload into an InfluxDB point and store it."""
        if "action" in data:
            # Messages carrying an "action" key are control traffic, not readings.
            return
        influx_data = {
            "measurement": "dustnode",
            # Assumes data["utctimestamp"] is a naive UTC datetime, hence the
            # explicit "Z" suffix - TODO confirm at the publisher.
            "timestamp": data["utctimestamp"].isoformat() + "Z",
            "fields": {
                "room_humidity": data["data"]["room_humidity"],
                "room_temperature": round(data["data"]["room_temperature"], 1),
                "barometer_temperature": round(data["data"]["barometer_temperature"], 1),
                "barometer_pressure": round(data["data"]["barometer_reading"], 1),
                "dust_density": round(data["data"]["dust_density"], 5),
                "sound_level": data["data"]["sound_level"],  # stored raw, no rounding
            }
        }
        self.insert_into_influx([influx_data])
def main():
    """Entry point: build the consumer and run it; exit status 0 on return."""
    DustNode().run()
    return 0

if __name__ == '__main__':
    sys.exit(main())
|
Add sound level to influx
|
Add sound level to influx
|
Python
|
bsd-3-clause
|
ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display
|
# coding=utf-8
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
    """Consume dust-node readings from the "dust-node-pubsub" channel and
    store them in InfluxDB."""
    def __init__(self):
        # "indoor_air_quality" presumably selects the destination
        # database/series group - confirm in SensorConsumerBase.
        SensorConsumerBase.__init__(self, "indoor_air_quality")
    def run(self):
        # Blocks, invoking pubsub_callback for every published message.
        self.subscribe("dust-node-pubsub", self.pubsub_callback)
    def pubsub_callback(self, data):
        """Convert one pubsub payload into an InfluxDB point and store it."""
        if "action" in data:
            # Messages carrying an "action" key are control traffic, not readings.
            return
        influx_data = {
            "measurement": "dustnode",
            # Assumes data["utctimestamp"] is a naive UTC datetime, hence the
            # explicit "Z" suffix - TODO confirm at the publisher.
            "timestamp": data["utctimestamp"].isoformat() + "Z",
            "fields": {
                "room_humidity": data["data"]["room_humidity"],
                "room_temperature": round(data["data"]["room_temperature"], 1),
                "barometer_temperature": round(data["data"]["barometer_temperature"], 1),
                "barometer_pressure": round(data["data"]["barometer_reading"], 1),
                "dust_density": round(data["data"]["dust_density"], 5)
            }
        }
        self.insert_into_influx([influx_data])
def main():
    """Entry point: build the consumer and run it; exit status 0 on return."""
    DustNode().run()
    return 0

if __name__ == '__main__':
    sys.exit(main())
Add sound level to influx
|
# coding=utf-8
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
    """Consume dust-node readings from the "dust-node-pubsub" channel and
    store them in InfluxDB."""
    def __init__(self):
        # "indoor_air_quality" presumably selects the destination
        # database/series group - confirm in SensorConsumerBase.
        SensorConsumerBase.__init__(self, "indoor_air_quality")
    def run(self):
        # Blocks, invoking pubsub_callback for every published message.
        self.subscribe("dust-node-pubsub", self.pubsub_callback)
    def pubsub_callback(self, data):
        """Convert one pubsub payload into an InfluxDB point and store it."""
        if "action" in data:
            # Messages carrying an "action" key are control traffic, not readings.
            return
        influx_data = {
            "measurement": "dustnode",
            # Assumes data["utctimestamp"] is a naive UTC datetime, hence the
            # explicit "Z" suffix - TODO confirm at the publisher.
            "timestamp": data["utctimestamp"].isoformat() + "Z",
            "fields": {
                "room_humidity": data["data"]["room_humidity"],
                "room_temperature": round(data["data"]["room_temperature"], 1),
                "barometer_temperature": round(data["data"]["barometer_temperature"], 1),
                "barometer_pressure": round(data["data"]["barometer_reading"], 1),
                "dust_density": round(data["data"]["dust_density"], 5),
                "sound_level": data["data"]["sound_level"],  # stored raw, no rounding
            }
        }
        self.insert_into_influx([influx_data])
def main():
    """Entry point: build the consumer and run it; exit status 0 on return."""
    DustNode().run()
    return 0

if __name__ == '__main__':
    sys.exit(main())
|
<commit_before># coding=utf-8
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
    """Consume dust-node readings from the "dust-node-pubsub" channel and
    store them in InfluxDB."""
    def __init__(self):
        # "indoor_air_quality" presumably selects the destination
        # database/series group - confirm in SensorConsumerBase.
        SensorConsumerBase.__init__(self, "indoor_air_quality")
    def run(self):
        # Blocks, invoking pubsub_callback for every published message.
        self.subscribe("dust-node-pubsub", self.pubsub_callback)
    def pubsub_callback(self, data):
        """Convert one pubsub payload into an InfluxDB point and store it."""
        if "action" in data:
            # Messages carrying an "action" key are control traffic, not readings.
            return
        influx_data = {
            "measurement": "dustnode",
            # Assumes data["utctimestamp"] is a naive UTC datetime, hence the
            # explicit "Z" suffix - TODO confirm at the publisher.
            "timestamp": data["utctimestamp"].isoformat() + "Z",
            "fields": {
                "room_humidity": data["data"]["room_humidity"],
                "room_temperature": round(data["data"]["room_temperature"], 1),
                "barometer_temperature": round(data["data"]["barometer_temperature"], 1),
                "barometer_pressure": round(data["data"]["barometer_reading"], 1),
                "dust_density": round(data["data"]["dust_density"], 5)
            }
        }
        self.insert_into_influx([influx_data])
def main():
    """Entry point: build the consumer and run it; exit status 0 on return."""
    DustNode().run()
    return 0

if __name__ == '__main__':
    sys.exit(main())
<commit_msg>Add sound level to influx<commit_after>
|
# coding=utf-8
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
    """Consume dust-node readings from the "dust-node-pubsub" channel and
    store them in InfluxDB."""
    def __init__(self):
        # "indoor_air_quality" presumably selects the destination
        # database/series group - confirm in SensorConsumerBase.
        SensorConsumerBase.__init__(self, "indoor_air_quality")
    def run(self):
        # Blocks, invoking pubsub_callback for every published message.
        self.subscribe("dust-node-pubsub", self.pubsub_callback)
    def pubsub_callback(self, data):
        """Convert one pubsub payload into an InfluxDB point and store it."""
        if "action" in data:
            # Messages carrying an "action" key are control traffic, not readings.
            return
        influx_data = {
            "measurement": "dustnode",
            # Assumes data["utctimestamp"] is a naive UTC datetime, hence the
            # explicit "Z" suffix - TODO confirm at the publisher.
            "timestamp": data["utctimestamp"].isoformat() + "Z",
            "fields": {
                "room_humidity": data["data"]["room_humidity"],
                "room_temperature": round(data["data"]["room_temperature"], 1),
                "barometer_temperature": round(data["data"]["barometer_temperature"], 1),
                "barometer_pressure": round(data["data"]["barometer_reading"], 1),
                "dust_density": round(data["data"]["dust_density"], 5),
                "sound_level": data["data"]["sound_level"],  # stored raw, no rounding
            }
        }
        self.insert_into_influx([influx_data])
def main():
item = DustNode()
item.run()
return 0
if __name__ == '__main__':
sys.exit(main())
|
# coding=utf-8
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
def __init__(self):
SensorConsumerBase.__init__(self, "indoor_air_quality")
def run(self):
self.subscribe("dust-node-pubsub", self.pubsub_callback)
def pubsub_callback(self, data):
if "action" in data:
return
influx_data = {
"measurement": "dustnode",
"timestamp": data["utctimestamp"].isoformat() + "Z",
"fields": {
"room_humidity": data["data"]["room_humidity"],
"room_temperature": round(data["data"]["room_temperature"], 1),
"barometer_temperature": round(data["data"]["barometer_temperature"], 1),
"barometer_pressure": round(data["data"]["barometer_reading"], 1),
"dust_density": round(data["data"]["dust_density"], 5)
}
}
self.insert_into_influx([influx_data])
def main():
item = DustNode()
item.run()
return 0
if __name__ == '__main__':
sys.exit(main())
Add sound level to influx# coding=utf-8
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
def __init__(self):
SensorConsumerBase.__init__(self, "indoor_air_quality")
def run(self):
self.subscribe("dust-node-pubsub", self.pubsub_callback)
def pubsub_callback(self, data):
if "action" in data:
return
influx_data = {
"measurement": "dustnode",
"timestamp": data["utctimestamp"].isoformat() + "Z",
"fields": {
"room_humidity": data["data"]["room_humidity"],
"room_temperature": round(data["data"]["room_temperature"], 1),
"barometer_temperature": round(data["data"]["barometer_temperature"], 1),
"barometer_pressure": round(data["data"]["barometer_reading"], 1),
"dust_density": round(data["data"]["dust_density"], 5),
"sound_level": data["data"]["sound_level"],
}
}
self.insert_into_influx([influx_data])
def main():
item = DustNode()
item.run()
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before># coding=utf-8
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
def __init__(self):
SensorConsumerBase.__init__(self, "indoor_air_quality")
def run(self):
self.subscribe("dust-node-pubsub", self.pubsub_callback)
def pubsub_callback(self, data):
if "action" in data:
return
influx_data = {
"measurement": "dustnode",
"timestamp": data["utctimestamp"].isoformat() + "Z",
"fields": {
"room_humidity": data["data"]["room_humidity"],
"room_temperature": round(data["data"]["room_temperature"], 1),
"barometer_temperature": round(data["data"]["barometer_temperature"], 1),
"barometer_pressure": round(data["data"]["barometer_reading"], 1),
"dust_density": round(data["data"]["dust_density"], 5)
}
}
self.insert_into_influx([influx_data])
def main():
item = DustNode()
item.run()
return 0
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Add sound level to influx<commit_after># coding=utf-8
from local_settings import *
from utils import SensorConsumerBase
import redis
import datetime
import sys
class DustNode(SensorConsumerBase):
def __init__(self):
SensorConsumerBase.__init__(self, "indoor_air_quality")
def run(self):
self.subscribe("dust-node-pubsub", self.pubsub_callback)
def pubsub_callback(self, data):
if "action" in data:
return
influx_data = {
"measurement": "dustnode",
"timestamp": data["utctimestamp"].isoformat() + "Z",
"fields": {
"room_humidity": data["data"]["room_humidity"],
"room_temperature": round(data["data"]["room_temperature"], 1),
"barometer_temperature": round(data["data"]["barometer_temperature"], 1),
"barometer_pressure": round(data["data"]["barometer_reading"], 1),
"dust_density": round(data["data"]["dust_density"], 5),
"sound_level": data["data"]["sound_level"],
}
}
self.insert_into_influx([influx_data])
def main():
item = DustNode()
item.run()
return 0
if __name__ == '__main__':
sys.exit(main())
|
4369de9f0f44860f27d26f6814dc100fefe421be
|
test_urls.py
|
test_urls.py
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'temp.views.home', name='home'),
# url(r'^temp/', include('temp.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^messages/', include('messages_extends.urls', namespace='messages')),
)
|
import django
if django.VERSION >= (1,10):
from django.conf.urls import include, url
patterns = lambda _ignore, x: list([x,])
else:
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^messages/', include('messages_extends.urls', namespace='messages')),
)
|
Fix tests for Django 1.10
|
Fix tests for Django 1.10
|
Python
|
mit
|
AliLozano/django-messages-extends,AliLozano/django-messages-extends,AliLozano/django-messages-extends
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'temp.views.home', name='home'),
# url(r'^temp/', include('temp.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^messages/', include('messages_extends.urls', namespace='messages')),
)
Fix tests for Django 1.10
|
import django
if django.VERSION >= (1,10):
from django.conf.urls import include, url
patterns = lambda _ignore, x: list([x,])
else:
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^messages/', include('messages_extends.urls', namespace='messages')),
)
|
<commit_before>from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'temp.views.home', name='home'),
# url(r'^temp/', include('temp.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^messages/', include('messages_extends.urls', namespace='messages')),
)
<commit_msg>Fix tests for Django 1.10<commit_after>
|
import django
if django.VERSION >= (1,10):
from django.conf.urls import include, url
patterns = lambda _ignore, x: list([x,])
else:
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^messages/', include('messages_extends.urls', namespace='messages')),
)
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'temp.views.home', name='home'),
# url(r'^temp/', include('temp.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^messages/', include('messages_extends.urls', namespace='messages')),
)
Fix tests for Django 1.10import django
if django.VERSION >= (1,10):
from django.conf.urls import include, url
patterns = lambda _ignore, x: list([x,])
else:
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^messages/', include('messages_extends.urls', namespace='messages')),
)
|
<commit_before>from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'temp.views.home', name='home'),
# url(r'^temp/', include('temp.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^messages/', include('messages_extends.urls', namespace='messages')),
)
<commit_msg>Fix tests for Django 1.10<commit_after>import django
if django.VERSION >= (1,10):
from django.conf.urls import include, url
patterns = lambda _ignore, x: list([x,])
else:
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^messages/', include('messages_extends.urls', namespace='messages')),
)
|
b2354fdde28bf841bebfc1f5347b2bde3c3cc390
|
db/TableBill.py
|
db/TableBill.py
|
{
PDBConst.Name: "bill",
PDBConst.Columns: [
{
PDBConst.Name: "ID",
PDBConst.Attributes: ["int", "not null", "auto_increment", "primary key"]
},
{
PDBConst.Name: "Datetime",
PDBConst.Attributes: ["datetime", "not null"]
},
{
PDBConst.Name: "Amount",
PDBConst.Attributes: ["double(12,2)", "not null"]
},
{
PDBConst.Name: "Currency",
PDBConst.Attributes: ["tinyint", "not null", "default 1"]
},
{
PDBConst.Name: "Category",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "PaymentMode",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "Note",
PDBConst.Attributes: ["varchar(255)"]
}]
}
|
{
PDBConst.Name: "bill",
PDBConst.Columns: [
{
PDBConst.Name: "ID",
PDBConst.Attributes: ["int", "not null", "auto_increment", "primary key"]
},
{
PDBConst.Name: "PID",
PDBConst.Attributes: ["int", "not null"]
},
{
PDBConst.Name: "Datetime",
PDBConst.Attributes: ["datetime", "not null"]
},
{
PDBConst.Name: "Amount",
PDBConst.Attributes: ["double(12,2)", "not null"]
},
{
PDBConst.Name: "Currency",
PDBConst.Attributes: ["tinyint", "not null", "default 1"]
},
{
PDBConst.Name: "Category",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "PaymentMode",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "Note",
PDBConst.Attributes: ["varchar(255)"]
}]
}
|
Fix bill table missed column
|
Fix bill table missed column
|
Python
|
mit
|
eddiedb6/ej,eddiedb6/ej,eddiedb6/ej
|
{
PDBConst.Name: "bill",
PDBConst.Columns: [
{
PDBConst.Name: "ID",
PDBConst.Attributes: ["int", "not null", "auto_increment", "primary key"]
},
{
PDBConst.Name: "Datetime",
PDBConst.Attributes: ["datetime", "not null"]
},
{
PDBConst.Name: "Amount",
PDBConst.Attributes: ["double(12,2)", "not null"]
},
{
PDBConst.Name: "Currency",
PDBConst.Attributes: ["tinyint", "not null", "default 1"]
},
{
PDBConst.Name: "Category",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "PaymentMode",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "Note",
PDBConst.Attributes: ["varchar(255)"]
}]
}
Fix bill table missed column
|
{
PDBConst.Name: "bill",
PDBConst.Columns: [
{
PDBConst.Name: "ID",
PDBConst.Attributes: ["int", "not null", "auto_increment", "primary key"]
},
{
PDBConst.Name: "PID",
PDBConst.Attributes: ["int", "not null"]
},
{
PDBConst.Name: "Datetime",
PDBConst.Attributes: ["datetime", "not null"]
},
{
PDBConst.Name: "Amount",
PDBConst.Attributes: ["double(12,2)", "not null"]
},
{
PDBConst.Name: "Currency",
PDBConst.Attributes: ["tinyint", "not null", "default 1"]
},
{
PDBConst.Name: "Category",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "PaymentMode",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "Note",
PDBConst.Attributes: ["varchar(255)"]
}]
}
|
<commit_before>{
PDBConst.Name: "bill",
PDBConst.Columns: [
{
PDBConst.Name: "ID",
PDBConst.Attributes: ["int", "not null", "auto_increment", "primary key"]
},
{
PDBConst.Name: "Datetime",
PDBConst.Attributes: ["datetime", "not null"]
},
{
PDBConst.Name: "Amount",
PDBConst.Attributes: ["double(12,2)", "not null"]
},
{
PDBConst.Name: "Currency",
PDBConst.Attributes: ["tinyint", "not null", "default 1"]
},
{
PDBConst.Name: "Category",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "PaymentMode",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "Note",
PDBConst.Attributes: ["varchar(255)"]
}]
}
<commit_msg>Fix bill table missed column<commit_after>
|
{
PDBConst.Name: "bill",
PDBConst.Columns: [
{
PDBConst.Name: "ID",
PDBConst.Attributes: ["int", "not null", "auto_increment", "primary key"]
},
{
PDBConst.Name: "PID",
PDBConst.Attributes: ["int", "not null"]
},
{
PDBConst.Name: "Datetime",
PDBConst.Attributes: ["datetime", "not null"]
},
{
PDBConst.Name: "Amount",
PDBConst.Attributes: ["double(12,2)", "not null"]
},
{
PDBConst.Name: "Currency",
PDBConst.Attributes: ["tinyint", "not null", "default 1"]
},
{
PDBConst.Name: "Category",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "PaymentMode",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "Note",
PDBConst.Attributes: ["varchar(255)"]
}]
}
|
{
PDBConst.Name: "bill",
PDBConst.Columns: [
{
PDBConst.Name: "ID",
PDBConst.Attributes: ["int", "not null", "auto_increment", "primary key"]
},
{
PDBConst.Name: "Datetime",
PDBConst.Attributes: ["datetime", "not null"]
},
{
PDBConst.Name: "Amount",
PDBConst.Attributes: ["double(12,2)", "not null"]
},
{
PDBConst.Name: "Currency",
PDBConst.Attributes: ["tinyint", "not null", "default 1"]
},
{
PDBConst.Name: "Category",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "PaymentMode",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "Note",
PDBConst.Attributes: ["varchar(255)"]
}]
}
Fix bill table missed column{
PDBConst.Name: "bill",
PDBConst.Columns: [
{
PDBConst.Name: "ID",
PDBConst.Attributes: ["int", "not null", "auto_increment", "primary key"]
},
{
PDBConst.Name: "PID",
PDBConst.Attributes: ["int", "not null"]
},
{
PDBConst.Name: "Datetime",
PDBConst.Attributes: ["datetime", "not null"]
},
{
PDBConst.Name: "Amount",
PDBConst.Attributes: ["double(12,2)", "not null"]
},
{
PDBConst.Name: "Currency",
PDBConst.Attributes: ["tinyint", "not null", "default 1"]
},
{
PDBConst.Name: "Category",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "PaymentMode",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "Note",
PDBConst.Attributes: ["varchar(255)"]
}]
}
|
<commit_before>{
PDBConst.Name: "bill",
PDBConst.Columns: [
{
PDBConst.Name: "ID",
PDBConst.Attributes: ["int", "not null", "auto_increment", "primary key"]
},
{
PDBConst.Name: "Datetime",
PDBConst.Attributes: ["datetime", "not null"]
},
{
PDBConst.Name: "Amount",
PDBConst.Attributes: ["double(12,2)", "not null"]
},
{
PDBConst.Name: "Currency",
PDBConst.Attributes: ["tinyint", "not null", "default 1"]
},
{
PDBConst.Name: "Category",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "PaymentMode",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "Note",
PDBConst.Attributes: ["varchar(255)"]
}]
}
<commit_msg>Fix bill table missed column<commit_after>{
PDBConst.Name: "bill",
PDBConst.Columns: [
{
PDBConst.Name: "ID",
PDBConst.Attributes: ["int", "not null", "auto_increment", "primary key"]
},
{
PDBConst.Name: "PID",
PDBConst.Attributes: ["int", "not null"]
},
{
PDBConst.Name: "Datetime",
PDBConst.Attributes: ["datetime", "not null"]
},
{
PDBConst.Name: "Amount",
PDBConst.Attributes: ["double(12,2)", "not null"]
},
{
PDBConst.Name: "Currency",
PDBConst.Attributes: ["tinyint", "not null", "default 1"]
},
{
PDBConst.Name: "Category",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "PaymentMode",
PDBConst.Attributes: ["tinyint"]
},
{
PDBConst.Name: "Note",
PDBConst.Attributes: ["varchar(255)"]
}]
}
|
c1f31f69ca7ba75185100cf7a8eabf58ed41ccdf
|
atmo/apps.py
|
atmo/apps.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import session_csrf
from django.apps import AppConfig
from django.db.models.signals import post_save, pre_delete
DEFAULT_JOB_TIMEOUT = 15
logger = logging.getLogger("django")
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under it's own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Connect signals.
from atmo.jobs.signals import assign_group_perm, remove_group_perm
post_save.connect(assign_group_perm, sender='jobs.SparkJob',
dispatch_uid='sparkjob_post_save_assign_perm')
pre_delete.connect(remove_group_perm, sender='jobs.SparkJob',
dispatch_uid='sparkjob_pre_delete_remove_perm')
class KeysAppConfig(AppConfig):
name = 'atmo.keys'
label = 'keys'
verbose_name = 'Keys'
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import session_csrf
from django.apps import AppConfig
from django.db.models.signals import post_save, pre_delete
DEFAULT_JOB_TIMEOUT = 15
logger = logging.getLogger("django")
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under it's own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Connect signals.
from atmo.jobs.models import SparkJob
from atmo.jobs.signals import assign_group_perm, remove_group_perm
post_save.connect(
assign_group_perm,
sender=SparkJob,
dispatch_uid='sparkjob_post_save_assign_perm',
)
pre_delete.connect(
remove_group_perm,
sender=SparkJob,
dispatch_uid='sparkjob_pre_delete_remove_perm',
)
class KeysAppConfig(AppConfig):
name = 'atmo.keys'
label = 'keys'
verbose_name = 'Keys'
|
Connect signal callback using the model class as sender.
|
Connect signal callback using the model class as sender.
|
Python
|
mpl-2.0
|
mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service,mozilla/telemetry-analysis-service
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import session_csrf
from django.apps import AppConfig
from django.db.models.signals import post_save, pre_delete
DEFAULT_JOB_TIMEOUT = 15
logger = logging.getLogger("django")
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under it's own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Connect signals.
from atmo.jobs.signals import assign_group_perm, remove_group_perm
post_save.connect(assign_group_perm, sender='jobs.SparkJob',
dispatch_uid='sparkjob_post_save_assign_perm')
pre_delete.connect(remove_group_perm, sender='jobs.SparkJob',
dispatch_uid='sparkjob_pre_delete_remove_perm')
class KeysAppConfig(AppConfig):
name = 'atmo.keys'
label = 'keys'
verbose_name = 'Keys'
Connect signal callback using the model class as sender.
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import session_csrf
from django.apps import AppConfig
from django.db.models.signals import post_save, pre_delete
DEFAULT_JOB_TIMEOUT = 15
logger = logging.getLogger("django")
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under it's own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Connect signals.
from atmo.jobs.models import SparkJob
from atmo.jobs.signals import assign_group_perm, remove_group_perm
post_save.connect(
assign_group_perm,
sender=SparkJob,
dispatch_uid='sparkjob_post_save_assign_perm',
)
pre_delete.connect(
remove_group_perm,
sender=SparkJob,
dispatch_uid='sparkjob_pre_delete_remove_perm',
)
class KeysAppConfig(AppConfig):
name = 'atmo.keys'
label = 'keys'
verbose_name = 'Keys'
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import session_csrf
from django.apps import AppConfig
from django.db.models.signals import post_save, pre_delete
DEFAULT_JOB_TIMEOUT = 15
logger = logging.getLogger("django")
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under it's own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Connect signals.
from atmo.jobs.signals import assign_group_perm, remove_group_perm
post_save.connect(assign_group_perm, sender='jobs.SparkJob',
dispatch_uid='sparkjob_post_save_assign_perm')
pre_delete.connect(remove_group_perm, sender='jobs.SparkJob',
dispatch_uid='sparkjob_pre_delete_remove_perm')
class KeysAppConfig(AppConfig):
name = 'atmo.keys'
label = 'keys'
verbose_name = 'Keys'
<commit_msg>Connect signal callback using the model class as sender.<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import session_csrf
from django.apps import AppConfig
from django.db.models.signals import post_save, pre_delete
DEFAULT_JOB_TIMEOUT = 15
logger = logging.getLogger("django")
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under it's own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Connect signals.
from atmo.jobs.models import SparkJob
from atmo.jobs.signals import assign_group_perm, remove_group_perm
post_save.connect(
assign_group_perm,
sender=SparkJob,
dispatch_uid='sparkjob_post_save_assign_perm',
)
pre_delete.connect(
remove_group_perm,
sender=SparkJob,
dispatch_uid='sparkjob_pre_delete_remove_perm',
)
class KeysAppConfig(AppConfig):
name = 'atmo.keys'
label = 'keys'
verbose_name = 'Keys'
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import session_csrf
from django.apps import AppConfig
from django.db.models.signals import post_save, pre_delete
DEFAULT_JOB_TIMEOUT = 15
logger = logging.getLogger("django")
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under it's own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Connect signals.
from atmo.jobs.signals import assign_group_perm, remove_group_perm
post_save.connect(assign_group_perm, sender='jobs.SparkJob',
dispatch_uid='sparkjob_post_save_assign_perm')
pre_delete.connect(remove_group_perm, sender='jobs.SparkJob',
dispatch_uid='sparkjob_pre_delete_remove_perm')
class KeysAppConfig(AppConfig):
name = 'atmo.keys'
label = 'keys'
verbose_name = 'Keys'
Connect signal callback using the model class as sender.# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import session_csrf
from django.apps import AppConfig
from django.db.models.signals import post_save, pre_delete
DEFAULT_JOB_TIMEOUT = 15
logger = logging.getLogger("django")
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under it's own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Connect signals.
from atmo.jobs.models import SparkJob
from atmo.jobs.signals import assign_group_perm, remove_group_perm
post_save.connect(
assign_group_perm,
sender=SparkJob,
dispatch_uid='sparkjob_post_save_assign_perm',
)
pre_delete.connect(
remove_group_perm,
sender=SparkJob,
dispatch_uid='sparkjob_pre_delete_remove_perm',
)
class KeysAppConfig(AppConfig):
name = 'atmo.keys'
label = 'keys'
verbose_name = 'Keys'
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import session_csrf
from django.apps import AppConfig
from django.db.models.signals import post_save, pre_delete
DEFAULT_JOB_TIMEOUT = 15
logger = logging.getLogger("django")
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under it's own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Connect signals.
from atmo.jobs.signals import assign_group_perm, remove_group_perm
post_save.connect(assign_group_perm, sender='jobs.SparkJob',
dispatch_uid='sparkjob_post_save_assign_perm')
pre_delete.connect(remove_group_perm, sender='jobs.SparkJob',
dispatch_uid='sparkjob_pre_delete_remove_perm')
class KeysAppConfig(AppConfig):
name = 'atmo.keys'
label = 'keys'
verbose_name = 'Keys'
<commit_msg>Connect signal callback using the model class as sender.<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, you can obtain one at http://mozilla.org/MPL/2.0/.
import logging
import session_csrf
from django.apps import AppConfig
from django.db.models.signals import post_save, pre_delete
DEFAULT_JOB_TIMEOUT = 15
logger = logging.getLogger("django")
class AtmoAppConfig(AppConfig):
name = 'atmo'
def ready(self):
# The app is now ready. Include any monkey patches here.
# Monkey patch CSRF to switch to session based CSRF. Session
# based CSRF will prevent attacks from apps under the same
# domain. If you're planning to host your app under it's own
# domain you can remove session_csrf and use Django's CSRF
# library. See also
# https://github.com/mozilla/sugardough/issues/38
session_csrf.monkeypatch()
# Connect signals.
from atmo.jobs.models import SparkJob
from atmo.jobs.signals import assign_group_perm, remove_group_perm
post_save.connect(
assign_group_perm,
sender=SparkJob,
dispatch_uid='sparkjob_post_save_assign_perm',
)
pre_delete.connect(
remove_group_perm,
sender=SparkJob,
dispatch_uid='sparkjob_pre_delete_remove_perm',
)
class KeysAppConfig(AppConfig):
name = 'atmo.keys'
label = 'keys'
verbose_name = 'Keys'
|
6ec4c21cf7af09401aabadff79898fe783efe9bd
|
skimage/transform/__init__.py
|
skimage/transform/__init__.py
|
from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform)
from ._warps import warp, warp_coords, swirl, homography
|
from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform)
from ._warps import warp, warp_coords, rotate, swirl, homography
|
Fix import of rotate function
|
Fix import of rotate function
|
Python
|
bsd-3-clause
|
SamHames/scikit-image,michaelpacer/scikit-image,ofgulban/scikit-image,paalge/scikit-image,jwiggins/scikit-image,SamHames/scikit-image,almarklein/scikit-image,rjeli/scikit-image,blink1073/scikit-image,ofgulban/scikit-image,almarklein/scikit-image,SamHames/scikit-image,almarklein/scikit-image,GaZ3ll3/scikit-image,chriscrosscutler/scikit-image,blink1073/scikit-image,oew1v07/scikit-image,ClinicalGraphics/scikit-image,chriscrosscutler/scikit-image,dpshelio/scikit-image,chintak/scikit-image,emon10005/scikit-image,youprofit/scikit-image,michaelpacer/scikit-image,juliusbierk/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,SamHames/scikit-image,robintw/scikit-image,GaZ3ll3/scikit-image,juliusbierk/scikit-image,Hiyorimi/scikit-image,dpshelio/scikit-image,michaelaye/scikit-image,vighneshbirodkar/scikit-image,Britefury/scikit-image,pratapvardhan/scikit-image,ClinicalGraphics/scikit-image,keflavich/scikit-image,almarklein/scikit-image,oew1v07/scikit-image,ajaybhat/scikit-image,vighneshbirodkar/scikit-image,keflavich/scikit-image,michaelaye/scikit-image,vighneshbirodkar/scikit-image,chintak/scikit-image,WarrenWeckesser/scikits-image,Hiyorimi/scikit-image,rjeli/scikit-image,ajaybhat/scikit-image,warmspringwinds/scikit-image,bsipocz/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,paalge/scikit-image,Midafi/scikit-image,rjeli/scikit-image,Britefury/scikit-image,jwiggins/scikit-image,robintw/scikit-image,bsipocz/scikit-image,pratapvardhan/scikit-image,WarrenWeckesser/scikits-image,bennlich/scikit-image,Midafi/scikit-image,newville/scikit-image,bennlich/scikit-image,chintak/scikit-image,newville/scikit-image,chintak/scikit-image,youprofit/scikit-image
|
from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform)
from ._warps import warp, warp_coords, swirl, homography
Fix import of rotate function
|
from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform)
from ._warps import warp, warp_coords, rotate, swirl, homography
|
<commit_before>from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform)
from ._warps import warp, warp_coords, swirl, homography
<commit_msg>Fix import of rotate function<commit_after>
|
from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform)
from ._warps import warp, warp_coords, rotate, swirl, homography
|
from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform)
from ._warps import warp, warp_coords, swirl, homography
Fix import of rotate functionfrom .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform)
from ._warps import warp, warp_coords, rotate, swirl, homography
|
<commit_before>from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform)
from ._warps import warp, warp_coords, swirl, homography
<commit_msg>Fix import of rotate function<commit_after>from .hough_transform import *
from .radon_transform import *
from .finite_radon_transform import *
from .integral import *
from ._geometric import (estimate_transform,
SimilarityTransform, AffineTransform,
ProjectiveTransform, PolynomialTransform)
from ._warps import warp, warp_coords, rotate, swirl, homography
|
c9f990ff4095b7fb361b2d59c0c5b2c9555643ff
|
csunplugged/tests/BaseTest.py
|
csunplugged/tests/BaseTest.py
|
"""Base test class with methods implemented for Django testing."""
from django.test import TestCase
from django.contrib.auth.models import User
from django.test.client import Client
from django.utils.translation import activate
<<<<<<< HEAD
class BaseTest(SimpleTestCase):
"""Base test class with methods implemented for Django testing."""
def __init__(self, *args, **kwargs):
"""Create the BaseTest object by calling the parent's constructor."""
super().__init__(*args, **kwargs)
self.language = None
@classmethod
def setUpClass(cls):
"""Automatically called before tests in class."""
super(BaseTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Automatically called after each test."""
super(BaseTest, cls).tearDownClass()
def setUp(self):
"""Automatically called before each test.
Sets the language if specified and creates a new client.
"""
if self.language is not None:
activate(self.language)
self.client = Client()
def tearDown(self):
"""Automatically called after each test.
Deletes test user.
"""
pass
|
"""Base test class with methods implemented for Django testing."""
from django.test import TestCase
from django.contrib.auth.models import User
from django.test.client import Client
from django.utils.translation import activate
class BaseTest(SimpleTestCase):
"""Base test class with methods implemented for Django testing."""
def __init__(self, *args, **kwargs):
"""Create the BaseTest object by calling the parent's constructor."""
super().__init__(*args, **kwargs)
self.language = None
@classmethod
def setUpClass(cls):
"""Automatically called before tests in class."""
super(BaseTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Automatically called after each test."""
super(BaseTest, cls).tearDownClass()
def setUp(self):
"""Automatically called before each test.
Sets the language if specified and creates a new client.
"""
if self.language is not None:
activate(self.language)
self.client = Client()
def tearDown(self):
"""Automatically called after each test.
Deletes test user.
"""
pass
|
Remove left over merge conflict text
|
Remove left over merge conflict text
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
"""Base test class with methods implemented for Django testing."""
from django.test import TestCase
from django.contrib.auth.models import User
from django.test.client import Client
from django.utils.translation import activate
<<<<<<< HEAD
class BaseTest(SimpleTestCase):
"""Base test class with methods implemented for Django testing."""
def __init__(self, *args, **kwargs):
"""Create the BaseTest object by calling the parent's constructor."""
super().__init__(*args, **kwargs)
self.language = None
@classmethod
def setUpClass(cls):
"""Automatically called before tests in class."""
super(BaseTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Automatically called after each test."""
super(BaseTest, cls).tearDownClass()
def setUp(self):
"""Automatically called before each test.
Sets the language if specified and creates a new client.
"""
if self.language is not None:
activate(self.language)
self.client = Client()
def tearDown(self):
"""Automatically called after each test.
Deletes test user.
"""
pass
Remove left over merge conflict text
|
"""Base test class with methods implemented for Django testing."""
from django.test import TestCase
from django.contrib.auth.models import User
from django.test.client import Client
from django.utils.translation import activate
class BaseTest(SimpleTestCase):
"""Base test class with methods implemented for Django testing."""
def __init__(self, *args, **kwargs):
"""Create the BaseTest object by calling the parent's constructor."""
super().__init__(*args, **kwargs)
self.language = None
@classmethod
def setUpClass(cls):
"""Automatically called before tests in class."""
super(BaseTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Automatically called after each test."""
super(BaseTest, cls).tearDownClass()
def setUp(self):
"""Automatically called before each test.
Sets the language if specified and creates a new client.
"""
if self.language is not None:
activate(self.language)
self.client = Client()
def tearDown(self):
"""Automatically called after each test.
Deletes test user.
"""
pass
|
<commit_before>"""Base test class with methods implemented for Django testing."""
from django.test import TestCase
from django.contrib.auth.models import User
from django.test.client import Client
from django.utils.translation import activate
<<<<<<< HEAD
class BaseTest(SimpleTestCase):
"""Base test class with methods implemented for Django testing."""
def __init__(self, *args, **kwargs):
"""Create the BaseTest object by calling the parent's constructor."""
super().__init__(*args, **kwargs)
self.language = None
@classmethod
def setUpClass(cls):
"""Automatically called before tests in class."""
super(BaseTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Automatically called after each test."""
super(BaseTest, cls).tearDownClass()
def setUp(self):
"""Automatically called before each test.
Sets the language if specified and creates a new client.
"""
if self.language is not None:
activate(self.language)
self.client = Client()
def tearDown(self):
"""Automatically called after each test.
Deletes test user.
"""
pass
<commit_msg>Remove left over merge conflict text<commit_after>
|
"""Base test class with methods implemented for Django testing."""
from django.test import TestCase
from django.contrib.auth.models import User
from django.test.client import Client
from django.utils.translation import activate
class BaseTest(SimpleTestCase):
"""Base test class with methods implemented for Django testing."""
def __init__(self, *args, **kwargs):
"""Create the BaseTest object by calling the parent's constructor."""
super().__init__(*args, **kwargs)
self.language = None
@classmethod
def setUpClass(cls):
"""Automatically called before tests in class."""
super(BaseTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Automatically called after each test."""
super(BaseTest, cls).tearDownClass()
def setUp(self):
"""Automatically called before each test.
Sets the language if specified and creates a new client.
"""
if self.language is not None:
activate(self.language)
self.client = Client()
def tearDown(self):
"""Automatically called after each test.
Deletes test user.
"""
pass
|
"""Base test class with methods implemented for Django testing."""
from django.test import TestCase
from django.contrib.auth.models import User
from django.test.client import Client
from django.utils.translation import activate
<<<<<<< HEAD
class BaseTest(SimpleTestCase):
"""Base test class with methods implemented for Django testing."""
def __init__(self, *args, **kwargs):
"""Create the BaseTest object by calling the parent's constructor."""
super().__init__(*args, **kwargs)
self.language = None
@classmethod
def setUpClass(cls):
"""Automatically called before tests in class."""
super(BaseTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Automatically called after each test."""
super(BaseTest, cls).tearDownClass()
def setUp(self):
"""Automatically called before each test.
Sets the language if specified and creates a new client.
"""
if self.language is not None:
activate(self.language)
self.client = Client()
def tearDown(self):
"""Automatically called after each test.
Deletes test user.
"""
pass
Remove left over merge conflict text"""Base test class with methods implemented for Django testing."""
from django.test import TestCase
from django.contrib.auth.models import User
from django.test.client import Client
from django.utils.translation import activate
class BaseTest(SimpleTestCase):
"""Base test class with methods implemented for Django testing."""
def __init__(self, *args, **kwargs):
"""Create the BaseTest object by calling the parent's constructor."""
super().__init__(*args, **kwargs)
self.language = None
@classmethod
def setUpClass(cls):
"""Automatically called before tests in class."""
super(BaseTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Automatically called after each test."""
super(BaseTest, cls).tearDownClass()
def setUp(self):
"""Automatically called before each test.
Sets the language if specified and creates a new client.
"""
if self.language is not None:
activate(self.language)
self.client = Client()
def tearDown(self):
"""Automatically called after each test.
Deletes test user.
"""
pass
|
<commit_before>"""Base test class with methods implemented for Django testing."""
from django.test import TestCase
from django.contrib.auth.models import User
from django.test.client import Client
from django.utils.translation import activate
<<<<<<< HEAD
class BaseTest(SimpleTestCase):
"""Base test class with methods implemented for Django testing."""
def __init__(self, *args, **kwargs):
"""Create the BaseTest object by calling the parent's constructor."""
super().__init__(*args, **kwargs)
self.language = None
@classmethod
def setUpClass(cls):
"""Automatically called before tests in class."""
super(BaseTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Automatically called after each test."""
super(BaseTest, cls).tearDownClass()
def setUp(self):
"""Automatically called before each test.
Sets the language if specified and creates a new client.
"""
if self.language is not None:
activate(self.language)
self.client = Client()
def tearDown(self):
"""Automatically called after each test.
Deletes test user.
"""
pass
<commit_msg>Remove left over merge conflict text<commit_after>"""Base test class with methods implemented for Django testing."""
from django.test import TestCase
from django.contrib.auth.models import User
from django.test.client import Client
from django.utils.translation import activate
class BaseTest(SimpleTestCase):
"""Base test class with methods implemented for Django testing."""
def __init__(self, *args, **kwargs):
"""Create the BaseTest object by calling the parent's constructor."""
super().__init__(*args, **kwargs)
self.language = None
@classmethod
def setUpClass(cls):
"""Automatically called before tests in class."""
super(BaseTest, cls).setUpClass()
@classmethod
def tearDownClass(cls):
"""Automatically called after each test."""
super(BaseTest, cls).tearDownClass()
def setUp(self):
"""Automatically called before each test.
Sets the language if specified and creates a new client.
"""
if self.language is not None:
activate(self.language)
self.client = Client()
def tearDown(self):
"""Automatically called after each test.
Deletes test user.
"""
pass
|
0ab2da918cbf0e58cf850f6868f5b896ea5c3893
|
heufybot/modules/util/nickservid.py
|
heufybot/modules/util/nickservid.py
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
import logging
class NickServIdentify(BotModule):
implements(IPlugin, IBotModule)
name = "NickServIdentify"
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("welcome", 1, self.identify) ]
def identify(self, serverName):
if not self.bot.moduleHandler.useModuleOnServer(self.name, serverName):
return
if "nickserv_nick" not in self.bot.config:
nick = "NickServ"
self.bot.servers[serverName].log("No valid NickServ nickname was found; defaulting to NickServ...",
level=logging.WARNING)
else:
nick = self.bot.config["nickserv_nick"]
if "nickserv_pass" not in self.bot.config:
self.bot.servers[serverName].log("No NickServ password found. Aborting authentication...",
level=logging.ERROR)
return
password = self.bot.config["nickserv_pass"]
self.bot.servers[serverName].outputHandler.cmdPRIVMSG(nick, "IDENTIFY {}".format(password))
nickServID = NickServIdentify()
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
import logging
class NickServIdentify(BotModule):
implements(IPlugin, IBotModule)
name = "NickServIdentify"
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("welcome", 1, self.identify) ]
def identify(self, serverName):
if not self.bot.moduleHandler.useModuleOnServer(self.name, serverName):
return
if "nickserv_nick" not in self.bot.config and "nickserv_nick" not in self.bot.config["servers"][serverName]:
nick = "NickServ"
self.bot.servers[serverName].log("No valid NickServ nickname was found; defaulting to NickServ...",
level=logging.WARNING)
else:
nick = self.bot.config.serverItemWithDefault(serverName, "nickserv_nick", "NickServ")
if "nickserv_pass" not in self.bot.config and "nickserv_pass" not in self.bot.config["servers"][serverName]:
self.bot.servers[serverName].log("No NickServ password found. Aborting authentication...",
level=logging.ERROR)
return
password = self.bot.config.serverItemWithDefault(serverName, "nickserv_pass", None)
self.bot.servers[serverName].outputHandler.cmdPRIVMSG(nick, "IDENTIFY {}".format(password))
nickServID = NickServIdentify()
|
Make NickServIdentify play nice with service specific configs
|
Make NickServIdentify play nice with service specific configs
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
import logging
class NickServIdentify(BotModule):
implements(IPlugin, IBotModule)
name = "NickServIdentify"
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("welcome", 1, self.identify) ]
def identify(self, serverName):
if not self.bot.moduleHandler.useModuleOnServer(self.name, serverName):
return
if "nickserv_nick" not in self.bot.config:
nick = "NickServ"
self.bot.servers[serverName].log("No valid NickServ nickname was found; defaulting to NickServ...",
level=logging.WARNING)
else:
nick = self.bot.config["nickserv_nick"]
if "nickserv_pass" not in self.bot.config:
self.bot.servers[serverName].log("No NickServ password found. Aborting authentication...",
level=logging.ERROR)
return
password = self.bot.config["nickserv_pass"]
self.bot.servers[serverName].outputHandler.cmdPRIVMSG(nick, "IDENTIFY {}".format(password))
nickServID = NickServIdentify()
Make NickServIdentify play nice with service specific configs
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
import logging
class NickServIdentify(BotModule):
implements(IPlugin, IBotModule)
name = "NickServIdentify"
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("welcome", 1, self.identify) ]
def identify(self, serverName):
if not self.bot.moduleHandler.useModuleOnServer(self.name, serverName):
return
if "nickserv_nick" not in self.bot.config and "nickserv_nick" not in self.bot.config["servers"][serverName]:
nick = "NickServ"
self.bot.servers[serverName].log("No valid NickServ nickname was found; defaulting to NickServ...",
level=logging.WARNING)
else:
nick = self.bot.config.serverItemWithDefault(serverName, "nickserv_nick", "NickServ")
if "nickserv_pass" not in self.bot.config and "nickserv_pass" not in self.bot.config["servers"][serverName]:
self.bot.servers[serverName].log("No NickServ password found. Aborting authentication...",
level=logging.ERROR)
return
password = self.bot.config.serverItemWithDefault(serverName, "nickserv_pass", None)
self.bot.servers[serverName].outputHandler.cmdPRIVMSG(nick, "IDENTIFY {}".format(password))
nickServID = NickServIdentify()
|
<commit_before>from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
import logging
class NickServIdentify(BotModule):
implements(IPlugin, IBotModule)
name = "NickServIdentify"
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("welcome", 1, self.identify) ]
def identify(self, serverName):
if not self.bot.moduleHandler.useModuleOnServer(self.name, serverName):
return
if "nickserv_nick" not in self.bot.config:
nick = "NickServ"
self.bot.servers[serverName].log("No valid NickServ nickname was found; defaulting to NickServ...",
level=logging.WARNING)
else:
nick = self.bot.config["nickserv_nick"]
if "nickserv_pass" not in self.bot.config:
self.bot.servers[serverName].log("No NickServ password found. Aborting authentication...",
level=logging.ERROR)
return
password = self.bot.config["nickserv_pass"]
self.bot.servers[serverName].outputHandler.cmdPRIVMSG(nick, "IDENTIFY {}".format(password))
nickServID = NickServIdentify()
<commit_msg>Make NickServIdentify play nice with service specific configs<commit_after>
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
import logging
class NickServIdentify(BotModule):
implements(IPlugin, IBotModule)
name = "NickServIdentify"
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("welcome", 1, self.identify) ]
def identify(self, serverName):
if not self.bot.moduleHandler.useModuleOnServer(self.name, serverName):
return
if "nickserv_nick" not in self.bot.config and "nickserv_nick" not in self.bot.config["servers"][serverName]:
nick = "NickServ"
self.bot.servers[serverName].log("No valid NickServ nickname was found; defaulting to NickServ...",
level=logging.WARNING)
else:
nick = self.bot.config.serverItemWithDefault(serverName, "nickserv_nick", "NickServ")
if "nickserv_pass" not in self.bot.config and "nickserv_pass" not in self.bot.config["servers"][serverName]:
self.bot.servers[serverName].log("No NickServ password found. Aborting authentication...",
level=logging.ERROR)
return
password = self.bot.config.serverItemWithDefault(serverName, "nickserv_pass", None)
self.bot.servers[serverName].outputHandler.cmdPRIVMSG(nick, "IDENTIFY {}".format(password))
nickServID = NickServIdentify()
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
import logging
class NickServIdentify(BotModule):
implements(IPlugin, IBotModule)
name = "NickServIdentify"
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("welcome", 1, self.identify) ]
def identify(self, serverName):
if not self.bot.moduleHandler.useModuleOnServer(self.name, serverName):
return
if "nickserv_nick" not in self.bot.config:
nick = "NickServ"
self.bot.servers[serverName].log("No valid NickServ nickname was found; defaulting to NickServ...",
level=logging.WARNING)
else:
nick = self.bot.config["nickserv_nick"]
if "nickserv_pass" not in self.bot.config:
self.bot.servers[serverName].log("No NickServ password found. Aborting authentication...",
level=logging.ERROR)
return
password = self.bot.config["nickserv_pass"]
self.bot.servers[serverName].outputHandler.cmdPRIVMSG(nick, "IDENTIFY {}".format(password))
nickServID = NickServIdentify()
Make NickServIdentify play nice with service specific configsfrom twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
import logging
class NickServIdentify(BotModule):
implements(IPlugin, IBotModule)
name = "NickServIdentify"
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("welcome", 1, self.identify) ]
def identify(self, serverName):
if not self.bot.moduleHandler.useModuleOnServer(self.name, serverName):
return
if "nickserv_nick" not in self.bot.config and "nickserv_nick" not in self.bot.config["servers"][serverName]:
nick = "NickServ"
self.bot.servers[serverName].log("No valid NickServ nickname was found; defaulting to NickServ...",
level=logging.WARNING)
else:
nick = self.bot.config.serverItemWithDefault(serverName, "nickserv_nick", "NickServ")
if "nickserv_pass" not in self.bot.config and "nickserv_pass" not in self.bot.config["servers"][serverName]:
self.bot.servers[serverName].log("No NickServ password found. Aborting authentication...",
level=logging.ERROR)
return
password = self.bot.config.serverItemWithDefault(serverName, "nickserv_pass", None)
self.bot.servers[serverName].outputHandler.cmdPRIVMSG(nick, "IDENTIFY {}".format(password))
nickServID = NickServIdentify()
|
<commit_before>from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
import logging
class NickServIdentify(BotModule):
implements(IPlugin, IBotModule)
name = "NickServIdentify"
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("welcome", 1, self.identify) ]
def identify(self, serverName):
if not self.bot.moduleHandler.useModuleOnServer(self.name, serverName):
return
if "nickserv_nick" not in self.bot.config:
nick = "NickServ"
self.bot.servers[serverName].log("No valid NickServ nickname was found; defaulting to NickServ...",
level=logging.WARNING)
else:
nick = self.bot.config["nickserv_nick"]
if "nickserv_pass" not in self.bot.config:
self.bot.servers[serverName].log("No NickServ password found. Aborting authentication...",
level=logging.ERROR)
return
password = self.bot.config["nickserv_pass"]
self.bot.servers[serverName].outputHandler.cmdPRIVMSG(nick, "IDENTIFY {}".format(password))
nickServID = NickServIdentify()
<commit_msg>Make NickServIdentify play nice with service specific configs<commit_after>from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
import logging
class NickServIdentify(BotModule):
implements(IPlugin, IBotModule)
name = "NickServIdentify"
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("welcome", 1, self.identify) ]
def identify(self, serverName):
if not self.bot.moduleHandler.useModuleOnServer(self.name, serverName):
return
if "nickserv_nick" not in self.bot.config and "nickserv_nick" not in self.bot.config["servers"][serverName]:
nick = "NickServ"
self.bot.servers[serverName].log("No valid NickServ nickname was found; defaulting to NickServ...",
level=logging.WARNING)
else:
nick = self.bot.config.serverItemWithDefault(serverName, "nickserv_nick", "NickServ")
if "nickserv_pass" not in self.bot.config and "nickserv_pass" not in self.bot.config["servers"][serverName]:
self.bot.servers[serverName].log("No NickServ password found. Aborting authentication...",
level=logging.ERROR)
return
password = self.bot.config.serverItemWithDefault(serverName, "nickserv_pass", None)
self.bot.servers[serverName].outputHandler.cmdPRIVMSG(nick, "IDENTIFY {}".format(password))
nickServID = NickServIdentify()
|
35d14348ce419421bba2b043ea2818c185526301
|
ratechecker/migrations/0002_remove_fee_loader.py
|
ratechecker/migrations/0002_remove_fee_loader.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
table_name = 'cfpb.ratechecker_fee'
index_name = 'idx_16977_product_id'
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
#migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
Comment out fix_fee_product_index from migration
|
Comment out fix_fee_product_index from migration
|
Python
|
cc0-1.0
|
cfpb/owning-a-home-api
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
table_name = 'cfpb.ratechecker_fee'
index_name = 'idx_16977_product_id'
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
Comment out fix_fee_product_index from migration
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
#migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
table_name = 'cfpb.ratechecker_fee'
index_name = 'idx_16977_product_id'
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
<commit_msg>Comment out fix_fee_product_index from migration<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
#migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
table_name = 'cfpb.ratechecker_fee'
index_name = 'idx_16977_product_id'
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
Comment out fix_fee_product_index from migration# -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
#migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
table_name = 'cfpb.ratechecker_fee'
index_name = 'idx_16977_product_id'
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
<commit_msg>Comment out fix_fee_product_index from migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.22 on 2019-10-31 16:33
from __future__ import unicode_literals
from django.db import migrations, OperationalError, ProgrammingError
def fix_fee_product_index(apps, schema_editor):
try:
schema_editor.execute(
'DROP INDEX idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'DROP CONSTRAINT IF EXISTS idx_16977_product_id;'
'ALTER TABLE cfpb.ratechecker_fee '
'ADD CONSTRAINT idx_16977_product_id '
'UNIQUE (product_id, state_id, lender, single_family, condo, coop);'
)
except (ProgrammingError, OperationalError):
pass
class Migration(migrations.Migration):
dependencies = [
('ratechecker', '0001_initial'),
]
operations = [
#migrations.RunPython(fix_fee_product_index),
migrations.AlterUniqueTogether(
name='fee',
unique_together=set([]),
),
migrations.RemoveField(
model_name='fee',
name='plan',
),
migrations.DeleteModel(
name='Fee',
),
]
|
9dd019c12899045faebd49bc06026c8512609c9e
|
statictemplate/management/commands/statictemplate.py
|
statictemplate/management/commands/statictemplate.py
|
# -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
assert all((patterns, url, include))
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
|
# -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
|
Remove assert line from import
|
Remove assert line from import
|
Python
|
bsd-3-clause
|
yakky/django-statictemplate,bdon/django-statictemplate,ojii/django-statictemplate
|
# -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
assert all((patterns, url, include))
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
Remove assert line from import
|
# -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
|
<commit_before># -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
assert all((patterns, url, include))
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
<commit_msg>Remove assert line from import<commit_after>
|
# -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
|
# -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
assert all((patterns, url, include))
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
Remove assert line from import# -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
|
<commit_before># -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
assert all((patterns, url, include))
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
<commit_msg>Remove assert line from import<commit_after># -*- coding: utf-8 -*-
from contextlib import contextmanager
from django.conf import settings
try:
from django.conf.urls.defaults import patterns, url, include
except ImportError:
from django.conf.urls import patterns, url, include # pragma: no cover
from django.core.management.base import BaseCommand
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from django.test.client import Client
@contextmanager
def override_urlconf():
has_old = hasattr(settings, 'ROOT_URLCONF')
old = getattr(settings, 'ROOT_URLCONF', None)
settings.ROOT_URLCONF = 'statictemplate.management.commands.statictemplate'
yield
if has_old:
setattr(settings, 'ROOT_URLCONF', old)
else: # pragma: no cover
delattr(settings, 'ROOT_URLCONF')
def make_static(template):
with override_urlconf():
client = Client()
response = client.get('/', {'template': template})
return response.content
class Command(BaseCommand):
def handle(self, template, **options):
output = make_static(template)
self.stdout.write(output)
def render(request):
template_name = request.GET['template']
return render_to_response(template_name, RequestContext(request))
urlpatterns = patterns('',
url('^$', render),
url('^others', include(settings.ROOT_URLCONF))
)
|
8cb34f4d88184d0c42e8c1fc41f451fa3cd5a6be
|
plugins/keepkey/cmdline.py
|
plugins/keepkey/cmdline.py
|
from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
|
from electrum.plugins import hook
from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
|
Fix undefined reference error in command line KeepKey plugin.
|
Fix undefined reference error in command line KeepKey plugin.
|
Python
|
mit
|
romanz/electrum,wakiyamap/electrum-mona,vialectrum/vialectrum,romanz/electrum,digitalbitbox/electrum,kyuupichan/electrum,asfin/electrum,pooler/electrum-ltc,vialectrum/vialectrum,kyuupichan/electrum,spesmilo/electrum,digitalbitbox/electrum,cryptapus/electrum,kyuupichan/electrum,digitalbitbox/electrum,wakiyamap/electrum-mona,pooler/electrum-ltc,spesmilo/electrum,cryptapus/electrum,fyookball/electrum,fyookball/electrum,spesmilo/electrum,fujicoin/electrum-fjc,fyookball/electrum,pooler/electrum-ltc,pooler/electrum-ltc,digitalbitbox/electrum,neocogent/electrum,vialectrum/vialectrum,asfin/electrum,asfin/electrum,fujicoin/electrum-fjc,wakiyamap/electrum-mona,romanz/electrum,neocogent/electrum,neocogent/electrum,spesmilo/electrum,wakiyamap/electrum-mona,cryptapus/electrum,fujicoin/electrum-fjc
|
from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
Fix undefined reference error in command line KeepKey plugin.
|
from electrum.plugins import hook
from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
|
<commit_before>from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
<commit_msg>Fix undefined reference error in command line KeepKey plugin.<commit_after>
|
from electrum.plugins import hook
from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
|
from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
Fix undefined reference error in command line KeepKey plugin.from electrum.plugins import hook
from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
|
<commit_before>from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
<commit_msg>Fix undefined reference error in command line KeepKey plugin.<commit_after>from electrum.plugins import hook
from electrum.util import print_msg, raw_input
from .keepkey import KeepKeyPlugin
from ..hw_wallet import CmdLineHandler
class Plugin(KeepKeyPlugin):
handler = CmdLineHandler()
@hook
def init_keystore(self, keystore):
if not isinstance(keystore, self.keystore_class):
return
keystore.handler = self.handler
|
a1d71466d09e9e1ea2f75eae57e72e0000c65ffc
|
tests/run.py
|
tests/run.py
|
import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATE_DIRS=('tests/templates',),
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': False,
'DIRS': ('tests/templates',),
},
]
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
Add new-style TEMPLATES setting for tests
|
Add new-style TEMPLATES setting for tests
|
Python
|
bsd-2-clause
|
incuna/incuna-mail,incuna/incuna-mail
|
import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATE_DIRS=('tests/templates',),
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
Add new-style TEMPLATES setting for tests
|
import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': False,
'DIRS': ('tests/templates',),
},
]
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
<commit_before>import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATE_DIRS=('tests/templates',),
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
<commit_msg>Add new-style TEMPLATES setting for tests<commit_after>
|
import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': False,
'DIRS': ('tests/templates',),
},
]
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATE_DIRS=('tests/templates',),
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
Add new-style TEMPLATES setting for testsimport sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': False,
'DIRS': ('tests/templates',),
},
]
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
<commit_before>import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATE_DIRS=('tests/templates',),
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
<commit_msg>Add new-style TEMPLATES setting for tests<commit_after>import sys
import django
from colour_runner.django_runner import ColourRunnerMixin
from django.conf import settings
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
},
MIDDLEWARE_CLASSES=(),
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': False,
'DIRS': ('tests/templates',),
},
]
)
if django.VERSION >= (1, 7):
django.setup()
try:
from django.test.runner import DiscoverRunner
except ImportError:
# Django < 1.6
from discover_runner import DiscoverRunner
class Runner(ColourRunnerMixin, DiscoverRunner):
pass
test_runner = Runner(verbosity=1)
failures = test_runner.run_tests(['tests'])
if failures:
sys.exit(1)
|
85ef4988e1f25b586d8dff63b4ade83a2222849f
|
zerver/filters.py
|
zerver/filters.py
|
from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
|
from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken', 'api_key']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
|
Add api_key to filtered variables.
|
Add api_key to filtered variables.
We don't use it yet, but the plan is the migrate there and it's better
to just have the filtering in place.
(imported from commit d0e7f40e8a439b8e8751da954e79b5f67226e5a9)
|
Python
|
apache-2.0
|
akuseru/zulip,wdaher/zulip,jimmy54/zulip,KJin99/zulip,joshisa/zulip,sup95/zulip,Vallher/zulip,jimmy54/zulip,yocome/zulip,LeeRisk/zulip,Batterfii/zulip,kaiyuanheshang/zulip,PaulPetring/zulip,guiquanz/zulip,dxq-git/zulip,littledogboy/zulip,KJin99/zulip,aps-sids/zulip,eeshangarg/zulip,LeeRisk/zulip,xuanhan863/zulip,gigawhitlocks/zulip,peguin40/zulip,aliceriot/zulip,ipernet/zulip,shaunstanislaus/zulip,yocome/zulip,MariaFaBella85/zulip,KJin99/zulip,timabbott/zulip,codeKonami/zulip,verma-varsha/zulip,dxq-git/zulip,johnny9/zulip,tommyip/zulip,umkay/zulip,alliejones/zulip,christi3k/zulip,noroot/zulip,firstblade/zulip,brainwane/zulip,showell/zulip,blaze225/zulip,sharmaeklavya2/zulip,ashwinirudrappa/zulip,ahmadassaf/zulip,amanharitsh123/zulip,tbutter/zulip,so0k/zulip,LeeRisk/zulip,qq1012803704/zulip,cosmicAsymmetry/zulip,Suninus/zulip,dotcool/zulip,atomic-labs/zulip,susansls/zulip,willingc/zulip,jackrzhang/zulip,codeKonami/zulip,paxapy/zulip,jrowan/zulip,joshisa/zulip,ryansnowboarder/zulip,shrikrishnaholla/zulip,JPJPJPOPOP/zulip,arpith/zulip,amallia/zulip,zacps/zulip,voidException/zulip,yuvipanda/zulip,umkay/zulip,ipernet/zulip,levixie/zulip,Jianchun1/zulip,zachallaun/zulip,showell/zulip,sharmaeklavya2/zulip,tdr130/zulip,vaidap/zulip,peiwei/zulip,saitodisse/zulip,dattatreya303/zulip,krtkmj/zulip,samatdav/zulip,paxapy/zulip,EasonYi/zulip,pradiptad/zulip,jeffcao/zulip,zorojean/zulip,praveenaki/zulip,akuseru/zulip,themass/zulip,nicholasbs/zulip,arpitpanwar/zulip,deer-hope/zulip,sharmaeklavya2/zulip,yuvipanda/zulip,Frouk/zulip,niftynei/zulip,easyfmxu/zulip,KingxBanana/zulip,dwrpayne/zulip,wdaher/zulip,verma-varsha/zulip,aliceriot/zulip,sonali0901/zulip,ApsOps/zulip,vabs22/zulip,MayB/zulip,developerfm/zulip,moria/zulip,eastlhu/zulip,lfranchi/zulip,Jianchun1/zulip,huangkebo/zulip,gkotian/zulip,hayderimran7/zulip,zorojean/zulip,dhcrzf/zulip,noroot/zulip,levixie/zulip,tiansiyuan/zulip,jerryge/zulip,ipernet/zulip,vabs22/zulip,wangdeshui/zulip,SmartPeople/zulip,swinghu/zulip,wdaher/zul
ip,Qgap/zulip,ryanbackman/zulip,johnny9/zulip,arpith/zulip,zofuthan/zulip,zachallaun/zulip,thomasboyt/zulip,shaunstanislaus/zulip,joshisa/zulip,proliming/zulip,aakash-cr7/zulip,hayderimran7/zulip,jeffcao/zulip,yocome/zulip,dotcool/zulip,praveenaki/zulip,hj3938/zulip,fw1121/zulip,ryanbackman/zulip,kou/zulip,Vallher/zulip,JanzTam/zulip,bitemyapp/zulip,jphilipsen05/zulip,kou/zulip,jainayush975/zulip,timabbott/zulip,AZtheAsian/zulip,levixie/zulip,natanovia/zulip,hafeez3000/zulip,hayderimran7/zulip,calvinleenyc/zulip,voidException/zulip,mdavid/zulip,Suninus/zulip,nicholasbs/zulip,bluesea/zulip,tommyip/zulip,adnanh/zulip,dxq-git/zulip,willingc/zulip,Vallher/zulip,hengqujushi/zulip,zulip/zulip,brainwane/zulip,pradiptad/zulip,noroot/zulip,joyhchen/zulip,themass/zulip,peiwei/zulip,Batterfii/zulip,dnmfarrell/zulip,johnnygaddarr/zulip,ashwinirudrappa/zulip,praveenaki/zulip,aps-sids/zulip,itnihao/zulip,hayderimran7/zulip,Jianchun1/zulip,hengqujushi/zulip,voidException/zulip,babbage/zulip,easyfmxu/zulip,guiquanz/zulip,JanzTam/zulip,punchagan/zulip,bastianh/zulip,eastlhu/zulip,Qgap/zulip,glovebx/zulip,atomic-labs/zulip,tiansiyuan/zulip,jeffcao/zulip,thomasboyt/zulip,kaiyuanheshang/zulip,Vallher/zulip,so0k/zulip,rht/zulip,ApsOps/zulip,tbutter/zulip,ikasumiwt/zulip,RobotCaleb/zulip,xuxiao/zulip,arpith/zulip,KingxBanana/zulip,dhcrzf/zulip,TigorC/zulip,wdaher/zulip,natanovia/zulip,jphilipsen05/zulip,hackerkid/zulip,punchagan/zulip,jessedhillon/zulip,amanharitsh123/zulip,umkay/zulip,huangkebo/zulip,armooo/zulip,eastlhu/zulip,mahim97/zulip,ipernet/zulip,so0k/zulip,karamcnair/zulip,johnny9/zulip,dattatreya303/zulip,andersk/zulip,Galexrt/zulip,SmartPeople/zulip,samatdav/zulip,babbage/zulip,Gabriel0402/zulip,hengqujushi/zulip,zwily/zulip,aakash-cr7/zulip,punchagan/zulip,technicalpickles/zulip,ericzhou2008/zulip,noroot/zulip,hustlzp/zulip,technicalpickles/zulip,avastu/zulip,dhcrzf/zulip,zulip/zulip,EasonYi/zulip,vaidap/zulip,hustlzp/zulip,reyha/zulip,reyha/zulip,susansls/zulip,synicalsynta
x/zulip,esander91/zulip,Gabriel0402/zulip,jimmy54/zulip,xuanhan863/zulip,samatdav/zulip,hj3938/zulip,brockwhittaker/zulip,zulip/zulip,wavelets/zulip,christi3k/zulip,ApsOps/zulip,niftynei/zulip,jonesgithub/zulip,MariaFaBella85/zulip,reyha/zulip,Frouk/zulip,umkay/zulip,dotcool/zulip,RobotCaleb/zulip,luyifan/zulip,Jianchun1/zulip,dwrpayne/zulip,arpitpanwar/zulip,sharmaeklavya2/zulip,Galexrt/zulip,peguin40/zulip,qq1012803704/zulip,niftynei/zulip,shubhamdhama/zulip,wweiradio/zulip,he15his/zulip,mohsenSy/zulip,Frouk/zulip,shubhamdhama/zulip,babbage/zulip,m1ssou/zulip,technicalpickles/zulip,eeshangarg/zulip,nicholasbs/zulip,jerryge/zulip,JanzTam/zulip,firstblade/zulip,ahmadassaf/zulip,yuvipanda/zulip,zacps/zulip,jackrzhang/zulip,johnnygaddarr/zulip,saitodisse/zulip,bssrdf/zulip,mansilladev/zulip,showell/zulip,RobotCaleb/zulip,avastu/zulip,eastlhu/zulip,johnnygaddarr/zulip,jainayush975/zulip,synicalsyntax/zulip,hackerkid/zulip,MayB/zulip,Batterfii/zulip,arpith/zulip,hj3938/zulip,proliming/zulip,themass/zulip,developerfm/zulip,jeffcao/zulip,zhaoweigg/zulip,huangkebo/zulip,ipernet/zulip,peguin40/zulip,bitemyapp/zulip,JanzTam/zulip,hustlzp/zulip,zorojean/zulip,peguin40/zulip,reyha/zulip,vaidap/zulip,Drooids/zulip,TigorC/zulip,Suninus/zulip,vabs22/zulip,ryanbackman/zulip,bluesea/zulip,avastu/zulip,hengqujushi/zulip,bowlofstew/zulip,ufosky-server/zulip,kaiyuanheshang/zulip,tbutter/zulip,Gabriel0402/zulip,arpitpanwar/zulip,ahmadassaf/zulip,fw1121/zulip,vabs22/zulip,ashwinirudrappa/zulip,wweiradio/zulip,lfranchi/zulip,natanovia/zulip,themass/zulip,bssrdf/zulip,natanovia/zulip,Drooids/zulip,mdavid/zulip,natanovia/zulip,saitodisse/zulip,ikasumiwt/zulip,zofuthan/zulip,EasonYi/zulip,AZtheAsian/zulip,udxxabp/zulip,vakila/zulip,yocome/zulip,xuxiao/zulip,brainwane/zulip,calvinleenyc/zulip,vakila/zulip,shaunstanislaus/zulip,cosmicAsymmetry/zulip,firstblade/zulip,wangdeshui/zulip,bowlofstew/zulip,jonesgithub/zulip,so0k/zulip,kou/zulip,luyifan/zulip,EasonYi/zulip,PaulPetring/zulip,Batterfii
/zulip,mohsenSy/zulip,zofuthan/zulip,jessedhillon/zulip,akuseru/zulip,arpitpanwar/zulip,zofuthan/zulip,peiwei/zulip,armooo/zulip,KingxBanana/zulip,xuxiao/zulip,luyifan/zulip,Qgap/zulip,zwily/zulip,ryansnowboarder/zulip,qq1012803704/zulip,udxxabp/zulip,hengqujushi/zulip,stamhe/zulip,grave-w-grave/zulip,dhcrzf/zulip,kokoar/zulip,brockwhittaker/zulip,JPJPJPOPOP/zulip,blaze225/zulip,timabbott/zulip,kaiyuanheshang/zulip,dwrpayne/zulip,DazWorrall/zulip,calvinleenyc/zulip,dhcrzf/zulip,wdaher/zulip,seapasulli/zulip,schatt/zulip,tommyip/zulip,isht3/zulip,moria/zulip,joyhchen/zulip,vakila/zulip,littledogboy/zulip,swinghu/zulip,bastianh/zulip,gigawhitlocks/zulip,LAndreas/zulip,niftynei/zulip,he15his/zulip,bastianh/zulip,kokoar/zulip,jonesgithub/zulip,itnihao/zulip,ufosky-server/zulip,peiwei/zulip,dawran6/zulip,Juanvulcano/zulip,SmartPeople/zulip,Diptanshu8/zulip,zofuthan/zulip,bowlofstew/zulip,shrikrishnaholla/zulip,DazWorrall/zulip,brainwane/zulip,Juanvulcano/zulip,umkay/zulip,RobotCaleb/zulip,codeKonami/zulip,dawran6/zulip,m1ssou/zulip,suxinde2009/zulip,cosmicAsymmetry/zulip,paxapy/zulip,karamcnair/zulip,sup95/zulip,technicalpickles/zulip,avastu/zulip,dawran6/zulip,Diptanshu8/zulip,sonali0901/zulip,stamhe/zulip,bssrdf/zulip,tdr130/zulip,nicholasbs/zulip,aakash-cr7/zulip,rishig/zulip,dnmfarrell/zulip,itnihao/zulip,sonali0901/zulip,sonali0901/zulip,ryanbackman/zulip,JPJPJPOPOP/zulip,PaulPetring/zulip,vakila/zulip,esander91/zulip,zofuthan/zulip,andersk/zulip,he15his/zulip,dattatreya303/zulip,jackrzhang/zulip,wweiradio/zulip,tommyip/zulip,easyfmxu/zulip,lfranchi/zulip,tdr130/zulip,tdr130/zulip,grave-w-grave/zulip,hackerkid/zulip,zachallaun/zulip,alliejones/zulip,willingc/zulip,ikasumiwt/zulip,jerryge/zulip,krtkmj/zulip,aakash-cr7/zulip,showell/zulip,xuanhan863/zulip,pradiptad/zulip,dattatreya303/zulip,jainayush975/zulip,jainayush975/zulip,hj3938/zulip,wavelets/zulip,Suninus/zulip,samatdav/zulip,wangdeshui/zulip,he15his/zulip,Drooids/zulip,he15his/zulip,joyhchen/zulip,itnihao/zul
ip,cosmicAsymmetry/zulip,jerryge/zulip,easyfmxu/zulip,dxq-git/zulip,Drooids/zulip,jackrzhang/zulip,MayB/zulip,yuvipanda/zulip,reyha/zulip,so0k/zulip,alliejones/zulip,krtkmj/zulip,Suninus/zulip,bluesea/zulip,ahmadassaf/zulip,j831/zulip,qq1012803704/zulip,dotcool/zulip,cosmicAsymmetry/zulip,mdavid/zulip,Suninus/zulip,dxq-git/zulip,JanzTam/zulip,fw1121/zulip,hj3938/zulip,ahmadassaf/zulip,AZtheAsian/zulip,j831/zulip,joyhchen/zulip,Jianchun1/zulip,johnny9/zulip,ryanbackman/zulip,jrowan/zulip,jrowan/zulip,synicalsyntax/zulip,hackerkid/zulip,shrikrishnaholla/zulip,bowlofstew/zulip,tommyip/zulip,RobotCaleb/zulip,rishig/zulip,jainayush975/zulip,eastlhu/zulip,huangkebo/zulip,kaiyuanheshang/zulip,dattatreya303/zulip,isht3/zulip,susansls/zulip,joshisa/zulip,m1ssou/zulip,hafeez3000/zulip,EasonYi/zulip,arpitpanwar/zulip,shaunstanislaus/zulip,Frouk/zulip,swinghu/zulip,KJin99/zulip,zorojean/zulip,schatt/zulip,shubhamdhama/zulip,dnmfarrell/zulip,firstblade/zulip,DazWorrall/zulip,ashwinirudrappa/zulip,Galexrt/zulip,sup95/zulip,zwily/zulip,DazWorrall/zulip,noroot/zulip,rht/zulip,zachallaun/zulip,amyliu345/zulip,voidException/zulip,atomic-labs/zulip,easyfmxu/zulip,karamcnair/zulip,karamcnair/zulip,bluesea/zulip,Galexrt/zulip,fw1121/zulip,eeshangarg/zulip,amyliu345/zulip,Juanvulcano/zulip,jessedhillon/zulip,ikasumiwt/zulip,Qgap/zulip,m1ssou/zulip,sonali0901/zulip,thomasboyt/zulip,zacps/zulip,zwily/zulip,PaulPetring/zulip,dnmfarrell/zulip,j831/zulip,Gabriel0402/zulip,alliejones/zulip,technicalpickles/zulip,bitemyapp/zulip,praveenaki/zulip,andersk/zulip,isht3/zulip,Frouk/zulip,zachallaun/zulip,paxapy/zulip,ericzhou2008/zulip,souravbadami/zulip,esander91/zulip,aliceriot/zulip,ikasumiwt/zulip,souravbadami/zulip,voidException/zulip,jphilipsen05/zulip,glovebx/zulip,susansls/zulip,zhaoweigg/zulip,amallia/zulip,tommyip/zulip,tiansiyuan/zulip,ApsOps/zulip,rishig/zulip,swinghu/zulip,SmartPeople/zulip,Cheppers/zulip,zhaoweigg/zulip,easyfmxu/zulip,wangdeshui/zulip,zofuthan/zulip,samatdav/zulip,thom
asboyt/zulip,technicalpickles/zulip,proliming/zulip,luyifan/zulip,brainwane/zulip,stamhe/zulip,willingc/zulip,ryansnowboarder/zulip,suxinde2009/zulip,SmartPeople/zulip,ashwinirudrappa/zulip,Frouk/zulip,vikas-parashar/zulip,andersk/zulip,shubhamdhama/zulip,tiansiyuan/zulip,TigorC/zulip,bitemyapp/zulip,KJin99/zulip,stamhe/zulip,vaidap/zulip,luyifan/zulip,Gabriel0402/zulip,kou/zulip,bssrdf/zulip,joshisa/zulip,Galexrt/zulip,dawran6/zulip,ashwinirudrappa/zulip,blaze225/zulip,bluesea/zulip,codeKonami/zulip,fw1121/zulip,jackrzhang/zulip,levixie/zulip,natanovia/zulip,nicholasbs/zulip,littledogboy/zulip,udxxabp/zulip,guiquanz/zulip,qq1012803704/zulip,mahim97/zulip,atomic-labs/zulip,ryansnowboarder/zulip,Drooids/zulip,hengqujushi/zulip,arpith/zulip,firstblade/zulip,krtkmj/zulip,jerryge/zulip,wavelets/zulip,MariaFaBella85/zulip,hj3938/zulip,MariaFaBella85/zulip,schatt/zulip,thomasboyt/zulip,Vallher/zulip,Diptanshu8/zulip,noroot/zulip,armooo/zulip,dwrpayne/zulip,littledogboy/zulip,johnnygaddarr/zulip,bitemyapp/zulip,grave-w-grave/zulip,ahmadassaf/zulip,bluesea/zulip,Batterfii/zulip,grave-w-grave/zulip,akuseru/zulip,sup95/zulip,jackrzhang/zulip,j831/zulip,hayderimran7/zulip,KJin99/zulip,hengqujushi/zulip,johnny9/zulip,aliceriot/zulip,ryansnowboarder/zulip,hackerkid/zulip,aps-sids/zulip,mohsenSy/zulip,calvinleenyc/zulip,themass/zulip,atomic-labs/zulip,jerryge/zulip,amallia/zulip,gkotian/zulip,mdavid/zulip,zorojean/zulip,ipernet/zulip,LAndreas/zulip,zulip/zulip,vakila/zulip,kou/zulip,fw1121/zulip,bastianh/zulip,voidException/zulip,xuanhan863/zulip,adnanh/zulip,avastu/zulip,zhaoweigg/zulip,schatt/zulip,tbutter/zulip,shrikrishnaholla/zulip,mansilladev/zulip,christi3k/zulip,RobotCaleb/zulip,arpitpanwar/zulip,amanharitsh123/zulip,ufosky-server/zulip,jimmy54/zulip,ufosky-server/zulip,jphilipsen05/zulip,jonesgithub/zulip,jessedhillon/zulip,moria/zulip,levixie/zulip,bowlofstew/zulip,shaunstanislaus/zulip,SmartPeople/zulip,jrowan/zulip,dnmfarrell/zulip,zorojean/zulip,punchagan/zulip,Maria
FaBella85/zulip,huangkebo/zulip,saitodisse/zulip,ipernet/zulip,niftynei/zulip,amyliu345/zulip,itnihao/zulip,grave-w-grave/zulip,xuxiao/zulip,tdr130/zulip,bluesea/zulip,PhilSk/zulip,bitemyapp/zulip,peiwei/zulip,armooo/zulip,kokoar/zulip,tbutter/zulip,tdr130/zulip,stamhe/zulip,itnihao/zulip,bssrdf/zulip,schatt/zulip,noroot/zulip,Drooids/zulip,technicalpickles/zulip,seapasulli/zulip,brainwane/zulip,deer-hope/zulip,bssrdf/zulip,rht/zulip,thomasboyt/zulip,rht/zulip,bowlofstew/zulip,schatt/zulip,deer-hope/zulip,peiwei/zulip,zachallaun/zulip,kokoar/zulip,mdavid/zulip,lfranchi/zulip,nicholasbs/zulip,LAndreas/zulip,m1ssou/zulip,Qgap/zulip,aps-sids/zulip,huangkebo/zulip,ahmadassaf/zulip,wavelets/zulip,willingc/zulip,hustlzp/zulip,guiquanz/zulip,voidException/zulip,kokoar/zulip,KingxBanana/zulip,jphilipsen05/zulip,aps-sids/zulip,jonesgithub/zulip,aakash-cr7/zulip,eeshangarg/zulip,JPJPJPOPOP/zulip,calvinleenyc/zulip,dawran6/zulip,ericzhou2008/zulip,babbage/zulip,Juanvulcano/zulip,brockwhittaker/zulip,peguin40/zulip,seapasulli/zulip,bowlofstew/zulip,Frouk/zulip,lfranchi/zulip,pradiptad/zulip,codeKonami/zulip,brockwhittaker/zulip,shaunstanislaus/zulip,amallia/zulip,hafeez3000/zulip,alliejones/zulip,zhaoweigg/zulip,punchagan/zulip,Diptanshu8/zulip,amanharitsh123/zulip,PhilSk/zulip,DazWorrall/zulip,dwrpayne/zulip,thomasboyt/zulip,lfranchi/zulip,wweiradio/zulip,mansilladev/zulip,bssrdf/zulip,seapasulli/zulip,susansls/zulip,Galexrt/zulip,JPJPJPOPOP/zulip,christi3k/zulip,jessedhillon/zulip,showell/zulip,avastu/zulip,karamcnair/zulip,karamcnair/zulip,timabbott/zulip,proliming/zulip,hustlzp/zulip,bitemyapp/zulip,johnnygaddarr/zulip,blaze225/zulip,firstblade/zulip,littledogboy/zulip,lfranchi/zulip,christi3k/zulip,so0k/zulip,suxinde2009/zulip,christi3k/zulip,wdaher/zulip,yuvipanda/zulip,sonali0901/zulip,vakila/zulip,aps-sids/zulip,kaiyuanheshang/zulip,andersk/zulip,m1ssou/zulip,armooo/zulip,xuxiao/zulip,PaulPetring/zulip,Gabriel0402/zulip,reyha/zulip,vaidap/zulip,peguin40/zulip,wdaher/zul
ip,jackrzhang/zulip,deer-hope/zulip,JPJPJPOPOP/zulip,zulip/zulip,LeeRisk/zulip,xuanhan863/zulip,esander91/zulip,mohsenSy/zulip,esander91/zulip,hafeez3000/zulip,sup95/zulip,PhilSk/zulip,jeffcao/zulip,karamcnair/zulip,EasonYi/zulip,dotcool/zulip,PhilSk/zulip,umkay/zulip,Juanvulcano/zulip,ryansnowboarder/zulip,shaunstanislaus/zulip,dawran6/zulip,jonesgithub/zulip,shrikrishnaholla/zulip,hayderimran7/zulip,eastlhu/zulip,glovebx/zulip,zwily/zulip,dxq-git/zulip,saitodisse/zulip,jimmy54/zulip,developerfm/zulip,mahim97/zulip,easyfmxu/zulip,wweiradio/zulip,showell/zulip,ericzhou2008/zulip,saitodisse/zulip,praveenaki/zulip,zorojean/zulip,Cheppers/zulip,natanovia/zulip,udxxabp/zulip,willingc/zulip,pradiptad/zulip,vikas-parashar/zulip,m1ssou/zulip,atomic-labs/zulip,zacps/zulip,joshisa/zulip,luyifan/zulip,jainayush975/zulip,wangdeshui/zulip,aps-sids/zulip,synicalsyntax/zulip,ApsOps/zulip,avastu/zulip,Cheppers/zulip,zulip/zulip,glovebx/zulip,KingxBanana/zulip,gigawhitlocks/zulip,bastianh/zulip,wavelets/zulip,wweiradio/zulip,glovebx/zulip,gigawhitlocks/zulip,verma-varsha/zulip,verma-varsha/zulip,babbage/zulip,vakila/zulip,jrowan/zulip,LeeRisk/zulip,kaiyuanheshang/zulip,Batterfii/zulip,gigawhitlocks/zulip,timabbott/zulip,TigorC/zulip,Cheppers/zulip,jrowan/zulip,ryanbackman/zulip,jerryge/zulip,mansilladev/zulip,vikas-parashar/zulip,rishig/zulip,hafeez3000/zulip,ericzhou2008/zulip,showell/zulip,armooo/zulip,adnanh/zulip,deer-hope/zulip,paxapy/zulip,rishig/zulip,synicalsyntax/zulip,souravbadami/zulip,brockwhittaker/zulip,johnny9/zulip,ikasumiwt/zulip,PhilSk/zulip,jeffcao/zulip,mansilladev/zulip,jeffcao/zulip,levixie/zulip,so0k/zulip,shrikrishnaholla/zulip,kou/zulip,LeeRisk/zulip,amallia/zulip,jessedhillon/zulip,stamhe/zulip,akuseru/zulip,developerfm/zulip,jonesgithub/zulip,willingc/zulip,themass/zulip,Vallher/zulip,xuanhan863/zulip,tbutter/zulip,qq1012803704/zulip,themass/zulip,grave-w-grave/zulip,gkotian/zulip,zacps/zulip,tommyip/zulip,yuvipanda/zulip,stamhe/zulip,LeeRisk/zulip,little
dogboy/zulip,ashwinirudrappa/zulip,Vallher/zulip,JanzTam/zulip,amallia/zulip,johnny9/zulip,guiquanz/zulip,krtkmj/zulip,tiansiyuan/zulip,schatt/zulip,Qgap/zulip,firstblade/zulip,eeshangarg/zulip,Batterfii/zulip,kou/zulip,PhilSk/zulip,hafeez3000/zulip,ApsOps/zulip,gigawhitlocks/zulip,glovebx/zulip,zwily/zulip,amyliu345/zulip,johnnygaddarr/zulip,TigorC/zulip,aliceriot/zulip,j831/zulip,mdavid/zulip,swinghu/zulip,shubhamdhama/zulip,dattatreya303/zulip,esander91/zulip,vabs22/zulip,RobotCaleb/zulip,huangkebo/zulip,itnihao/zulip,timabbott/zulip,KJin99/zulip,jimmy54/zulip,Diptanshu8/zulip,zulip/zulip,xuxiao/zulip,zhaoweigg/zulip,proliming/zulip,jphilipsen05/zulip,joyhchen/zulip,blaze225/zulip,codeKonami/zulip,ericzhou2008/zulip,MayB/zulip,gkotian/zulip,littledogboy/zulip,mahim97/zulip,LAndreas/zulip,Suninus/zulip,pradiptad/zulip,alliejones/zulip,praveenaki/zulip,paxapy/zulip,vaidap/zulip,tiansiyuan/zulip,johnnygaddarr/zulip,atomic-labs/zulip,xuxiao/zulip,codeKonami/zulip,gkotian/zulip,vikas-parashar/zulip,mohsenSy/zulip,zacps/zulip,babbage/zulip,suxinde2009/zulip,ufosky-server/zulip,akuseru/zulip,vikas-parashar/zulip,amyliu345/zulip,Diptanshu8/zulip,wavelets/zulip,niftynei/zulip,DazWorrall/zulip,armooo/zulip,suxinde2009/zulip,tbutter/zulip,esander91/zulip,arpitpanwar/zulip,swinghu/zulip,punchagan/zulip,qq1012803704/zulip,souravbadami/zulip,krtkmj/zulip,mansilladev/zulip,deer-hope/zulip,zhaoweigg/zulip,MayB/zulip,amyliu345/zulip,moria/zulip,synicalsyntax/zulip,isht3/zulip,adnanh/zulip,TigorC/zulip,developerfm/zulip,Qgap/zulip,calvinleenyc/zulip,MayB/zulip,rishig/zulip,Juanvulcano/zulip,gkotian/zulip,dotcool/zulip,mahim97/zulip,arpith/zulip,vabs22/zulip,zachallaun/zulip,wweiradio/zulip,ikasumiwt/zulip,rht/zulip,mansilladev/zulip,JanzTam/zulip,pradiptad/zulip,Cheppers/zulip,DazWorrall/zulip,aakash-cr7/zulip,cosmicAsymmetry/zulip,amallia/zulip,dhcrzf/zulip,eastlhu/zulip,developerfm/zulip,souravbadami/zulip,udxxabp/zulip,guiquanz/zulip,souravbadami/zulip,KingxBanana/zulip,praveen
aki/zulip,jimmy54/zulip,Drooids/zulip,deer-hope/zulip,isht3/zulip,samatdav/zulip,moria/zulip,gigawhitlocks/zulip,hj3938/zulip,kokoar/zulip,wangdeshui/zulip,hayderimran7/zulip,aliceriot/zulip,ufosky-server/zulip,mahim97/zulip,Jianchun1/zulip,Cheppers/zulip,amanharitsh123/zulip,MayB/zulip,rht/zulip,joshisa/zulip,AZtheAsian/zulip,hustlzp/zulip,gkotian/zulip,saitodisse/zulip,MariaFaBella85/zulip,eeshangarg/zulip,vikas-parashar/zulip,mohsenSy/zulip,isht3/zulip,ryansnowboarder/zulip,adnanh/zulip,sharmaeklavya2/zulip,moria/zulip,punchagan/zulip,dxq-git/zulip,bastianh/zulip,babbage/zulip,timabbott/zulip,udxxabp/zulip,shubhamdhama/zulip,brockwhittaker/zulip,sup95/zulip,yocome/zulip,AZtheAsian/zulip,hafeez3000/zulip,PaulPetring/zulip,udxxabp/zulip,LAndreas/zulip,developerfm/zulip,alliejones/zulip,bastianh/zulip,andersk/zulip,seapasulli/zulip,joyhchen/zulip,akuseru/zulip,Gabriel0402/zulip,moria/zulip,ufosky-server/zulip,tiansiyuan/zulip,dwrpayne/zulip,guiquanz/zulip,eeshangarg/zulip,suxinde2009/zulip,LAndreas/zulip,synicalsyntax/zulip,wangdeshui/zulip,shubhamdhama/zulip,LAndreas/zulip,tdr130/zulip,yocome/zulip,zwily/zulip,MariaFaBella85/zulip,dhcrzf/zulip,xuanhan863/zulip,nicholasbs/zulip,sharmaeklavya2/zulip,luyifan/zulip,suxinde2009/zulip,rishig/zulip,glovebx/zulip,susansls/zulip,andersk/zulip,hustlzp/zulip,proliming/zulip,yuvipanda/zulip,seapasulli/zulip,Cheppers/zulip,verma-varsha/zulip,PaulPetring/zulip,EasonYi/zulip,dwrpayne/zulip,proliming/zulip,ApsOps/zulip,dotcool/zulip,umkay/zulip,AZtheAsian/zulip,aliceriot/zulip,dnmfarrell/zulip,brainwane/zulip,krtkmj/zulip,dnmfarrell/zulip,Galexrt/zulip,jessedhillon/zulip,verma-varsha/zulip,fw1121/zulip,blaze225/zulip,seapasulli/zulip,j831/zulip,he15his/zulip,yocome/zulip,hackerkid/zulip,levixie/zulip,wavelets/zulip,adnanh/zulip,amanharitsh123/zulip,hackerkid/zulip,adnanh/zulip,he15his/zulip,kokoar/zulip,ericzhou2008/zulip,peiwei/zulip,shrikrishnaholla/zulip,mdavid/zulip,rht/zulip,swinghu/zulip
|
from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
Add api_key to filtered variables.
We don't use it yet, but the plan is the migrate there and it's better
to just have the filtering in place.
(imported from commit d0e7f40e8a439b8e8751da954e79b5f67226e5a9)
|
from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken', 'api_key']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
|
<commit_before>from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
<commit_msg>Add api_key to filtered variables.
We don't use it yet, but the plan is the migrate there and it's better
to just have the filtering in place.
(imported from commit d0e7f40e8a439b8e8751da954e79b5f67226e5a9)<commit_after>
|
from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken', 'api_key']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
|
from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
Add api_key to filtered variables.
We don't use it yet, but the plan is the migrate there and it's better
to just have the filtering in place.
(imported from commit d0e7f40e8a439b8e8751da954e79b5f67226e5a9)from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken', 'api_key']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
|
<commit_before>from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
<commit_msg>Add api_key to filtered variables.
We don't use it yet, but the plan is the migrate there and it's better
to just have the filtering in place.
(imported from commit d0e7f40e8a439b8e8751da954e79b5f67226e5a9)<commit_after>from __future__ import absolute_import
from django.views.debug import SafeExceptionReporterFilter
from django.http import build_request_repr
class ZulipExceptionReporterFilter(SafeExceptionReporterFilter):
def get_post_parameters(self, request):
filtered_post = SafeExceptionReporterFilter.get_post_parameters(self, request).copy()
filtered_vars = ['content', 'secret', 'password', 'key', 'api-key', 'subject', 'stream',
'subscriptions', 'to', 'csrfmiddlewaretoken', 'api_key']
for var in filtered_vars:
if var in filtered_post:
filtered_post[var] = '**********'
return filtered_post
def get_request_repr(self, request):
if request is None:
return repr(None)
else:
return build_request_repr(request,
POST_override=self.get_post_parameters(request),
COOKIES_override="**********",
META_override="**********")
|
8be7205128eb96fd52dc922ff45aa5356a59d318
|
src/main/translator-xml/PMLToXML.py
|
src/main/translator-xml/PMLToXML.py
|
#!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
return_code = subprocess.call("Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
|
#!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
# TODO: Remove abs-path
return_code = subprocess.call("/opt/pml-bnfc/xml/Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
|
Change paths in PML-XML tool
|
Change paths in PML-XML tool
|
Python
|
mit
|
CS4098/GroupProject,CS4098/GroupProject,CS4098/GroupProject
|
#!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
return_code = subprocess.call("Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
Change paths in PML-XML tool
|
#!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
# TODO: Remove abs-path
return_code = subprocess.call("/opt/pml-bnfc/xml/Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
return_code = subprocess.call("Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
<commit_msg>Change paths in PML-XML tool<commit_after>
|
#!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
# TODO: Remove abs-path
return_code = subprocess.call("/opt/pml-bnfc/xml/Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
|
#!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
return_code = subprocess.call("Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
Change paths in PML-XML tool#!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
# TODO: Remove abs-path
return_code = subprocess.call("/opt/pml-bnfc/xml/Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
return_code = subprocess.call("Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
<commit_msg>Change paths in PML-XML tool<commit_after>#!/usr/bin/env/python
import sys
import os.path
import subprocess
# Read in a pml file and save to an xml file
def translate_pml_file(xml_file, pml_file):
pml_path = os.path.abspath(pml_file.name)
xml_path = os.path.abspath(xml_file.name)
# Call XML generator
# TODO: Remove abs-path
return_code = subprocess.call("/opt/pml-bnfc/xml/Pmlxml %s %s" % (xml_path, pml_path), shell=True)
if return_code != 0:
print "Error occured reading PML file, exiting."
sys.exit(1)
def main():
import argparse
parser = argparse.ArgumentParser(description="Program to output the ast of a PML program in XML format")
parser.add_argument('-x', '--xml', required=True, type=file, help="Output abstract syntax tree in XML format")
parser.add_argument('-p', '--pml', required=True, type=file, help="Input PML file")
try:
args = parser.parse_args()
translate_pml_file(args.xml, args.pml)
except IOError, msg:
parser.error(str(msg))
if __name__ == "__main__":
main()
|
0efeaa258b19d5b1ba204cc55fbdb6969e0f3e64
|
flake8_respect_noqa.py
|
flake8_respect_noqa.py
|
# -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
import pep8
class RespectNoqaReport(pep8.StandardReport):
def error(self, line_number, offset, text, check):
if len(self.lines) > line_number - 1 and pep8.noqa(self.lines[line_number - 1]):
return
else:
return super(RespectNoqaReport, self).error(line_number, offset,
text, check)
class RespectNoqa(object):
name = 'flake8-respect-noqa'
version = __version__
def __init__(self, *args, **kwargs):
pass
@classmethod
def parse_options(cls, options):
# The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
# or put "jobs = 1" in your [flake8] config.
# Otherwise, flake8 replaces this reported with it's own.
# See https://gitlab.com/pycqa/flake8/issues/66
options.reporter = RespectNoqaReport
options.report = RespectNoqaReport(options)
|
# -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
try:
from pep8 import StandardReport, noqa
except ImportError:
# Try the new (as of 2016-June) pycodestyle package.
from pycodestyle import StandardReport, noqa
class RespectNoqaReport(StandardReport):
    """pep8/pycodestyle report that swallows errors on '# noqa' lines."""

    def error(self, line_number, offset, text, check):
        # Suppress the error entirely when the offending physical line
        # carries a '# noqa' marker; otherwise defer to the stock report.
        idx = line_number - 1
        if idx < len(self.lines) and noqa(self.lines[idx]):
            return None
        return super(RespectNoqaReport, self).error(
            line_number, offset, text, check)
class RespectNoqa(object):
    """flake8 plugin entry point that installs RespectNoqaReport."""
    name = 'flake8-respect-noqa'
    version = __version__
    def __init__(self, *args, **kwargs):
        # No per-check state; flake8 instantiates plugins with varargs.
        pass
    @classmethod
    def parse_options(cls, options):
        # The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
        # or put "jobs = 1" in your [flake8] config.
        # Otherwise, flake8 replaces this reporter with its own.
        # See https://gitlab.com/pycqa/flake8/issues/66
        options.reporter = RespectNoqaReport
        options.report = RespectNoqaReport(options)
|
Adjust for pep8 package rename.
|
Adjust for pep8 package rename.
Closes #1
|
Python
|
mit
|
spookylukey/flake8-respect-noqa
|
# -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
import pep8
class RespectNoqaReport(pep8.StandardReport):
def error(self, line_number, offset, text, check):
if len(self.lines) > line_number - 1 and pep8.noqa(self.lines[line_number - 1]):
return
else:
return super(RespectNoqaReport, self).error(line_number, offset,
text, check)
class RespectNoqa(object):
name = 'flake8-respect-noqa'
version = __version__
def __init__(self, *args, **kwargs):
pass
@classmethod
def parse_options(cls, options):
# The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
# or put "jobs = 1" in your [flake8] config.
# Otherwise, flake8 replaces this reported with it's own.
# See https://gitlab.com/pycqa/flake8/issues/66
options.reporter = RespectNoqaReport
options.report = RespectNoqaReport(options)
Adjust for pep8 package rename.
Closes #1
|
# -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
try:
from pep8 import StandardReport, noqa
except ImportError:
# Try the new (as of 2016-June) pycodestyle package.
from pycodestyle import StandardReport, noqa
class RespectNoqaReport(StandardReport):
def error(self, line_number, offset, text, check):
if len(self.lines) > line_number - 1 and noqa(self.lines[line_number - 1]):
return
else:
return super(RespectNoqaReport, self).error(line_number, offset,
text, check)
class RespectNoqa(object):
name = 'flake8-respect-noqa'
version = __version__
def __init__(self, *args, **kwargs):
pass
@classmethod
def parse_options(cls, options):
# The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
# or put "jobs = 1" in your [flake8] config.
# Otherwise, flake8 replaces this reported with it's own.
# See https://gitlab.com/pycqa/flake8/issues/66
options.reporter = RespectNoqaReport
options.report = RespectNoqaReport(options)
|
<commit_before># -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
import pep8
class RespectNoqaReport(pep8.StandardReport):
def error(self, line_number, offset, text, check):
if len(self.lines) > line_number - 1 and pep8.noqa(self.lines[line_number - 1]):
return
else:
return super(RespectNoqaReport, self).error(line_number, offset,
text, check)
class RespectNoqa(object):
name = 'flake8-respect-noqa'
version = __version__
def __init__(self, *args, **kwargs):
pass
@classmethod
def parse_options(cls, options):
# The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
# or put "jobs = 1" in your [flake8] config.
# Otherwise, flake8 replaces this reported with it's own.
# See https://gitlab.com/pycqa/flake8/issues/66
options.reporter = RespectNoqaReport
options.report = RespectNoqaReport(options)
<commit_msg>Adjust for pep8 package rename.
Closes #1<commit_after>
|
# -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
try:
from pep8 import StandardReport, noqa
except ImportError:
# Try the new (as of 2016-June) pycodestyle package.
from pycodestyle import StandardReport, noqa
class RespectNoqaReport(StandardReport):
def error(self, line_number, offset, text, check):
if len(self.lines) > line_number - 1 and noqa(self.lines[line_number - 1]):
return
else:
return super(RespectNoqaReport, self).error(line_number, offset,
text, check)
class RespectNoqa(object):
name = 'flake8-respect-noqa'
version = __version__
def __init__(self, *args, **kwargs):
pass
@classmethod
def parse_options(cls, options):
# The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
# or put "jobs = 1" in your [flake8] config.
# Otherwise, flake8 replaces this reported with it's own.
# See https://gitlab.com/pycqa/flake8/issues/66
options.reporter = RespectNoqaReport
options.report = RespectNoqaReport(options)
|
# -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
import pep8
class RespectNoqaReport(pep8.StandardReport):
def error(self, line_number, offset, text, check):
if len(self.lines) > line_number - 1 and pep8.noqa(self.lines[line_number - 1]):
return
else:
return super(RespectNoqaReport, self).error(line_number, offset,
text, check)
class RespectNoqa(object):
name = 'flake8-respect-noqa'
version = __version__
def __init__(self, *args, **kwargs):
pass
@classmethod
def parse_options(cls, options):
# The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
# or put "jobs = 1" in your [flake8] config.
# Otherwise, flake8 replaces this reported with it's own.
# See https://gitlab.com/pycqa/flake8/issues/66
options.reporter = RespectNoqaReport
options.report = RespectNoqaReport(options)
Adjust for pep8 package rename.
Closes #1# -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
try:
from pep8 import StandardReport, noqa
except ImportError:
# Try the new (as of 2016-June) pycodestyle package.
from pycodestyle import StandardReport, noqa
class RespectNoqaReport(StandardReport):
def error(self, line_number, offset, text, check):
if len(self.lines) > line_number - 1 and noqa(self.lines[line_number - 1]):
return
else:
return super(RespectNoqaReport, self).error(line_number, offset,
text, check)
class RespectNoqa(object):
name = 'flake8-respect-noqa'
version = __version__
def __init__(self, *args, **kwargs):
pass
@classmethod
def parse_options(cls, options):
# The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
# or put "jobs = 1" in your [flake8] config.
# Otherwise, flake8 replaces this reported with it's own.
# See https://gitlab.com/pycqa/flake8/issues/66
options.reporter = RespectNoqaReport
options.report = RespectNoqaReport(options)
|
<commit_before># -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
import pep8
class RespectNoqaReport(pep8.StandardReport):
def error(self, line_number, offset, text, check):
if len(self.lines) > line_number - 1 and pep8.noqa(self.lines[line_number - 1]):
return
else:
return super(RespectNoqaReport, self).error(line_number, offset,
text, check)
class RespectNoqa(object):
name = 'flake8-respect-noqa'
version = __version__
def __init__(self, *args, **kwargs):
pass
@classmethod
def parse_options(cls, options):
# The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
# or put "jobs = 1" in your [flake8] config.
# Otherwise, flake8 replaces this reported with it's own.
# See https://gitlab.com/pycqa/flake8/issues/66
options.reporter = RespectNoqaReport
options.report = RespectNoqaReport(options)
<commit_msg>Adjust for pep8 package rename.
Closes #1<commit_after># -*- coding: utf-8 -*-
"""
Always ignore lines with '# noqa'
"""
__version__ = 0.2
try:
from pep8 import StandardReport, noqa
except ImportError:
# Try the new (as of 2016-June) pycodestyle package.
from pycodestyle import StandardReport, noqa
class RespectNoqaReport(StandardReport):
def error(self, line_number, offset, text, check):
if len(self.lines) > line_number - 1 and noqa(self.lines[line_number - 1]):
return
else:
return super(RespectNoqaReport, self).error(line_number, offset,
text, check)
class RespectNoqa(object):
name = 'flake8-respect-noqa'
version = __version__
def __init__(self, *args, **kwargs):
pass
@classmethod
def parse_options(cls, options):
# The following only works with (flake8 2.4.1) if you run like "flake8 -j 1",
# or put "jobs = 1" in your [flake8] config.
# Otherwise, flake8 replaces this reported with it's own.
# See https://gitlab.com/pycqa/flake8/issues/66
options.reporter = RespectNoqaReport
options.report = RespectNoqaReport(options)
|
e37eba5f9430cfa3c3cf081066e7079e5c564e95
|
generic_scaffold/templatetags/generic_scaffold_tags.py
|
generic_scaffold/templatetags/generic_scaffold_tags.py
|
from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.simple_tag
def get_url_for_action(prefix, action):
    """Render the URL name registered for *action* under *prefix*.

    Raises KeyError if *action* is not in the scaffold's URL-name map.
    """
    url = get_url_names(prefix)[action]
    return url
@register.assignment_tag
def set_url_for_action(prefix, action):
    """Assignment-tag twin of get_url_for_action: returns the URL name
    so templates can store it in a variable ({% ... as var %})."""
    url = get_url_names(prefix)[action]
    return url
|
from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.assignment_tag
def set_urls_for_scaffold(app=None, model=None, prefix=None):
    """Resolve and return the scaffold URL names for app/model or prefix,
    for use as a template assignment tag ({% ... as var %})."""
    return get_url_names(app, model, prefix)
|
Improve templatetag to use either prefix or ...
|
Improve templatetag to use either prefix or ...
app/model
|
Python
|
mit
|
spapas/django-generic-scaffold,spapas/django-generic-scaffold
|
from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.simple_tag
def get_url_for_action(prefix, action):
url = get_url_names(prefix)[action]
return url
@register.assignment_tag
def set_url_for_action(prefix, action):
url = get_url_names(prefix)[action]
return url
Improve templatetag to use either prefix or ...
app/model
|
from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.assignment_tag
def set_urls_for_scaffold(app=None, model=None, prefix=None):
url_name = get_url_names(app, model, prefix)
return url_name
|
<commit_before>from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.simple_tag
def get_url_for_action(prefix, action):
url = get_url_names(prefix)[action]
return url
@register.assignment_tag
def set_url_for_action(prefix, action):
url = get_url_names(prefix)[action]
return url
<commit_msg>Improve templatetag to use either prefix or ...
app/model<commit_after>
|
from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.assignment_tag
def set_urls_for_scaffold(app=None, model=None, prefix=None):
url_name = get_url_names(app, model, prefix)
return url_name
|
from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.simple_tag
def get_url_for_action(prefix, action):
url = get_url_names(prefix)[action]
return url
@register.assignment_tag
def set_url_for_action(prefix, action):
url = get_url_names(prefix)[action]
return url
Improve templatetag to use either prefix or ...
app/modelfrom django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.assignment_tag
def set_urls_for_scaffold(app=None, model=None, prefix=None):
url_name = get_url_names(app, model, prefix)
return url_name
|
<commit_before>from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.simple_tag
def get_url_for_action(prefix, action):
url = get_url_names(prefix)[action]
return url
@register.assignment_tag
def set_url_for_action(prefix, action):
url = get_url_names(prefix)[action]
return url
<commit_msg>Improve templatetag to use either prefix or ...
app/model<commit_after>from django import template
from django.conf import settings
from generic_scaffold import get_url_names
register = template.Library()
@register.assignment_tag
def set_urls_for_scaffold(app=None, model=None, prefix=None):
url_name = get_url_names(app, model, prefix)
return url_name
|
f8d980de69607e73f207fea808c3b0558a4159c0
|
pyconcz_2016/cfp/models.py
|
pyconcz_2016/cfp/models.py
|
from django.db import models
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
    """A call-for-proposals round belonging to a conference."""
    conference = models.ForeignKey(Conference, related_name="cfps")
    # Human-readable name of this CfP round.
    title = models.CharField(max_length=200)
    # Submission window; tz-awareness follows Django's USE_TZ setting -- TODO confirm.
    date_start = models.DateTimeField()
    date_end = models.DateTimeField()
    class Meta:
        # Earliest-opening CfP first by default.
        ordering = ['date_start']
    def __str__(self):
        return self.title
class Proposal(models.Model):
    """A talk proposal submitted to a CfP round."""
    # (stored value, human label) pairs for the talk's difficulty level.
    DIFFICULTY = (
        ('all', 'All'),
        ('beginner', 'Beginner'),
        ('advanced', 'Advanced'),
    )
    cfp = models.ForeignKey(Cfp, related_name='proposals')
    # Public speaker info
    full_name = models.CharField(max_length=200)
    bio = models.TextField()
    # Social handles; presumably stored without the leading '@' -- verify in forms.
    twitter = models.CharField(max_length=20, blank=True)
    github = models.CharField(max_length=20, blank=True)
    # Public talk info
    title = models.CharField(max_length=200)
    abstract = models.TextField()
    difficulty = models.CharField(
        max_length=10, choices=DIFFICULTY, default='all')
    # Private notes (for reviewers only)
    note = models.TextField()
|
from django.db import models
from django.utils.timezone import now
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
date = models.DateTimeField(default=now)
|
Add date and social media fields to proposal
|
Add date and social media fields to proposal
|
Python
|
mit
|
pyvec/cz.pycon.org-2017,pyvec/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,pyvec/cz.pycon.org-2016,pyvec/cz.pycon.org-2016,benabraham/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,pyvec/cz.pycon.org-2017,pyvec/cz.pycon.org-2016
|
from django.db import models
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
Add date and social media fields to proposal
|
from django.db import models
from django.utils.timezone import now
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
date = models.DateTimeField(default=now)
|
<commit_before>from django.db import models
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
<commit_msg>Add date and social media fields to proposal<commit_after>
|
from django.db import models
from django.utils.timezone import now
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
date = models.DateTimeField(default=now)
|
from django.db import models
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
Add date and social media fields to proposalfrom django.db import models
from django.utils.timezone import now
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
date = models.DateTimeField(default=now)
|
<commit_before>from django.db import models
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
<commit_msg>Add date and social media fields to proposal<commit_after>from django.db import models
from django.utils.timezone import now
from pyconcz_2016.conferences.models import Conference
class Cfp(models.Model):
conference = models.ForeignKey(Conference, related_name="cfps")
title = models.CharField(max_length=200)
date_start = models.DateTimeField()
date_end = models.DateTimeField()
class Meta:
ordering = ['date_start']
def __str__(self):
return self.title
class Proposal(models.Model):
DIFFICULTY = (
('all', 'All'),
('beginner', 'Beginner'),
('advanced', 'Advanced'),
)
cfp = models.ForeignKey(Cfp, related_name='proposals')
# Public speaker info
full_name = models.CharField(max_length=200)
bio = models.TextField()
twitter = models.CharField(max_length=20, blank=True)
github = models.CharField(max_length=20, blank=True)
# Public talk info
title = models.CharField(max_length=200)
abstract = models.TextField()
difficulty = models.CharField(
max_length=10, choices=DIFFICULTY, default='all')
# Private notes (for reviewers only)
note = models.TextField()
date = models.DateTimeField(default=now)
|
bee9373dcf852e7af9f0f1a78dcc17a0922f96fe
|
anchorhub/tests/test_main.py
|
anchorhub/tests/test_main.py
|
"""
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main(['.'])
|
"""
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
from anchorhub.util.getanchorhubpath import get_anchorhub_path
from anchorhub.compatibility import get_path_separator
def test_one():
    """
    main.py: Test defaults with local directory as input.

    Runs anchorhub's main() end-to-end against the bundled multi-file
    sample, located relative to the installed anchorhub package path so
    the test works regardless of the current working directory.
    """
    main.main([get_anchorhub_path() + get_path_separator() +
               '../sample/multi-file'])
|
Modify main.py tests to use get_anchorhub_path()
|
Modify main.py tests to use get_anchorhub_path()
|
Python
|
apache-2.0
|
samjabrahams/anchorhub
|
"""
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main(['.'])
Modify main.py tests to use get_anchorhub_path()
|
"""
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
from anchorhub.util.getanchorhubpath import get_anchorhub_path
from anchorhub.compatibility import get_path_separator
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main([get_anchorhub_path() + get_path_separator() +
'../sample/multi-file'])
|
<commit_before>"""
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main(['.'])
<commit_msg>Modify main.py tests to use get_anchorhub_path()<commit_after>
|
"""
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
from anchorhub.util.getanchorhubpath import get_anchorhub_path
from anchorhub.compatibility import get_path_separator
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main([get_anchorhub_path() + get_path_separator() +
'../sample/multi-file'])
|
"""
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main(['.'])
Modify main.py tests to use get_anchorhub_path()"""
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
from anchorhub.util.getanchorhubpath import get_anchorhub_path
from anchorhub.compatibility import get_path_separator
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main([get_anchorhub_path() + get_path_separator() +
'../sample/multi-file'])
|
<commit_before>"""
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main(['.'])
<commit_msg>Modify main.py tests to use get_anchorhub_path()<commit_after>"""
test_main.py - Tests for main.py
main.py:
http://www.github.com/samjabrahams/anchorhub/main.py
"""
from nose.tools import *
import anchorhub.main as main
from anchorhub.util.getanchorhubpath import get_anchorhub_path
from anchorhub.compatibility import get_path_separator
def test_one():
"""
main.py: Test defaults with local directory as input.
"""
main.main([get_anchorhub_path() + get_path_separator() +
'../sample/multi-file'])
|
0507dfbd23db74db1c59bd1084647cc49ef19aee
|
addons/website_notfound_redirect/ir_http.py
|
addons/website_notfound_redirect/ir_http.py
|
# -*- coding: utf-8 -*-
import logging
import urllib2
from openerp.http import request
from openerp.osv import orm
logger = logging.getLogger(__name__)
class ir_http(orm.AbstractModel):
    """Redirect 404s to a configured legacy site when the page exists there."""
    _inherit = 'ir.http'

    def _handle_exception(self, exception, code=500):
        """On a 404, probe `website.notfound_redirect_url` + path; redirect
        (302) if the legacy URL answers, otherwise fall through to the
        standard error handling.

        Fixes a NameError in the original: when no redirect URL was
        configured, the "URL not found" log line referenced `url_request`
        before assignment.
        """
        code = getattr(exception, 'code', code)
        if code == 404:
            page = request.httprequest.path
            logger.warning("404 code... %s" % (page))
            url = request.registry['ir.config_parameter'].get_param(request.cr,
                request.uid, 'website.notfound_redirect_url')
            # Initialise both so the logging below is safe when url is unset.
            url_request = None
            request_old = False
            if url:
                url_request = "%s%s" % (url, page)
                logger.info("The redirect url: %s" % (url_request))
                try:
                    # Python 2 urllib2: HEAD-less GET probe of the legacy URL.
                    req = urllib2.Request(url_request)
                    request_old = urllib2.urlopen(req)
                except (urllib2.HTTPError, urllib2.URLError):
                    request_old = False
            if not request_old:
                # url_request may be None when no redirect URL is configured.
                logger.warning("URL not found: %s" % (url_request or page))
                return super(ir_http, self)._handle_exception(exception, code)
            else:
                logger.warning("Redirect to %s" % (url_request))
                return request.redirect(url_request, code=302)
        return super(ir_http, self)._handle_exception(exception, code)
|
# -*- coding: utf-8 -*-
import logging
import urllib2
from openerp.http import request
from openerp.osv import orm
logger = logging.getLogger(__name__)
class ir_http(orm.AbstractModel):
_inherit = 'ir.http'
def _handle_exception(self, exception, code=500):
code = getattr(exception, 'code', code)
if code == 404:
page = request.httprequest.path
logger.info("404 code... %s" % (page))
url = request.registry['ir.config_parameter'].get_param(request.cr,
request.uid, 'website.notfound_redirect_url')
if url:
url_request = "%s%s" % (url, page)
logger.info("The redirect url: %s" % (url_request))
try:
req = urllib2.Request(url_request)
request_old = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
request_old = False
else:
request_old = False
if not request_old:
logger.info("URL not found: %s" % (url_request))
return super(ir_http, self)._handle_exception(exception, code)
else:
logger.info("Redirect to %s" % (url_request))
return request.redirect(url_request, code=302)
return super(ir_http, self)._handle_exception(exception, code)
|
Change logger messages to info
|
Change logger messages to info
|
Python
|
agpl-3.0
|
shingonoide/odoo_ezdoo,shingonoide/odoo_ezdoo
|
# -*- coding: utf-8 -*-
import logging
import urllib2
from openerp.http import request
from openerp.osv import orm
logger = logging.getLogger(__name__)


class ir_http(orm.AbstractModel):
    """Redirect 404 responses to a configured legacy site when that site
    still has the requested page."""
    _inherit = 'ir.http'

    def _handle_exception(self, exception, code=500):
        """Intercept 404s and 302-redirect to the legacy URL if it resolves.

        The fallback base URL is read from the
        ``website.notfound_redirect_url`` config parameter.  Any other
        status code, or a missing/unreachable fallback, falls through to
        the default handler unchanged.
        """
        code = getattr(exception, 'code', code)
        if code == 404:
            page = request.httprequest.path
            # Lazy %-style logger args: formatted only if the record is emitted.
            logger.warning("404 code... %s", page)
            url = request.registry['ir.config_parameter'].get_param(
                request.cr, request.uid, 'website.notfound_redirect_url')
            if not url:
                # No fallback configured: keep the standard 404 handling.
                # (Previously this path fell through to a log call that
                # referenced `url_request` before assignment -> NameError.)
                return super(ir_http, self)._handle_exception(exception, code)
            url_request = "%s%s" % (url, page)
            logger.info("The redirect url: %s", url_request)
            try:
                req = urllib2.Request(url_request)
                # Timeout so a slow or unreachable fallback host cannot
                # hang the HTTP worker while it is serving an error page.
                request_old = urllib2.urlopen(req, timeout=10)
            except (urllib2.HTTPError, urllib2.URLError):
                request_old = False
            if not request_old:
                logger.warning("URL not found: %s", url_request)
                return super(ir_http, self)._handle_exception(exception, code)
            logger.warning("Redirect to %s", url_request)
            return request.redirect(url_request, code=302)
        return super(ir_http, self)._handle_exception(exception, code)
Change logger messages to info
|
# -*- coding: utf-8 -*-
import logging
import urllib2
from openerp.http import request
from openerp.osv import orm
logger = logging.getLogger(__name__)
class ir_http(orm.AbstractModel):
_inherit = 'ir.http'
def _handle_exception(self, exception, code=500):
code = getattr(exception, 'code', code)
if code == 404:
page = request.httprequest.path
logger.info("404 code... %s" % (page))
url = request.registry['ir.config_parameter'].get_param(request.cr,
request.uid, 'website.notfound_redirect_url')
if url:
url_request = "%s%s" % (url, page)
logger.info("The redirect url: %s" % (url_request))
try:
req = urllib2.Request(url_request)
request_old = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
request_old = False
else:
request_old = False
if not request_old:
logger.info("URL not found: %s" % (url_request))
return super(ir_http, self)._handle_exception(exception, code)
else:
logger.info("Redirect to %s" % (url_request))
return request.redirect(url_request, code=302)
return super(ir_http, self)._handle_exception(exception, code)
|
<commit_before># -*- coding: utf-8 -*-
import logging
import urllib2
from openerp.http import request
from openerp.osv import orm
logger = logging.getLogger(__name__)
class ir_http(orm.AbstractModel):
_inherit = 'ir.http'
def _handle_exception(self, exception, code=500):
code = getattr(exception, 'code', code)
if code == 404:
page = request.httprequest.path
logger.warning("404 code... %s" % (page))
url = request.registry['ir.config_parameter'].get_param(request.cr,
request.uid, 'website.notfound_redirect_url')
if url:
url_request = "%s%s" % (url, page)
logger.info("The redirect url: %s" % (url_request))
try:
req = urllib2.Request(url_request)
request_old = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
request_old = False
else:
request_old = False
if not request_old:
logger.warning("URL not found: %s" % (url_request))
return super(ir_http, self)._handle_exception(exception, code)
else:
logger.warning("Redirect to %s" % (url_request))
return request.redirect(url_request, code=302)
return super(ir_http, self)._handle_exception(exception, code)
<commit_msg>Change logger messages to info<commit_after>
|
# -*- coding: utf-8 -*-
import logging
import urllib2
from openerp.http import request
from openerp.osv import orm
logger = logging.getLogger(__name__)
class ir_http(orm.AbstractModel):
_inherit = 'ir.http'
def _handle_exception(self, exception, code=500):
code = getattr(exception, 'code', code)
if code == 404:
page = request.httprequest.path
logger.info("404 code... %s" % (page))
url = request.registry['ir.config_parameter'].get_param(request.cr,
request.uid, 'website.notfound_redirect_url')
if url:
url_request = "%s%s" % (url, page)
logger.info("The redirect url: %s" % (url_request))
try:
req = urllib2.Request(url_request)
request_old = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
request_old = False
else:
request_old = False
if not request_old:
logger.info("URL not found: %s" % (url_request))
return super(ir_http, self)._handle_exception(exception, code)
else:
logger.info("Redirect to %s" % (url_request))
return request.redirect(url_request, code=302)
return super(ir_http, self)._handle_exception(exception, code)
|
# -*- coding: utf-8 -*-
import logging
import urllib2
from openerp.http import request
from openerp.osv import orm
logger = logging.getLogger(__name__)
class ir_http(orm.AbstractModel):
_inherit = 'ir.http'
def _handle_exception(self, exception, code=500):
code = getattr(exception, 'code', code)
if code == 404:
page = request.httprequest.path
logger.warning("404 code... %s" % (page))
url = request.registry['ir.config_parameter'].get_param(request.cr,
request.uid, 'website.notfound_redirect_url')
if url:
url_request = "%s%s" % (url, page)
logger.info("The redirect url: %s" % (url_request))
try:
req = urllib2.Request(url_request)
request_old = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
request_old = False
else:
request_old = False
if not request_old:
logger.warning("URL not found: %s" % (url_request))
return super(ir_http, self)._handle_exception(exception, code)
else:
logger.warning("Redirect to %s" % (url_request))
return request.redirect(url_request, code=302)
return super(ir_http, self)._handle_exception(exception, code)
Change logger messages to info# -*- coding: utf-8 -*-
import logging
import urllib2
from openerp.http import request
from openerp.osv import orm
logger = logging.getLogger(__name__)
class ir_http(orm.AbstractModel):
_inherit = 'ir.http'
def _handle_exception(self, exception, code=500):
code = getattr(exception, 'code', code)
if code == 404:
page = request.httprequest.path
logger.info("404 code... %s" % (page))
url = request.registry['ir.config_parameter'].get_param(request.cr,
request.uid, 'website.notfound_redirect_url')
if url:
url_request = "%s%s" % (url, page)
logger.info("The redirect url: %s" % (url_request))
try:
req = urllib2.Request(url_request)
request_old = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
request_old = False
else:
request_old = False
if not request_old:
logger.info("URL not found: %s" % (url_request))
return super(ir_http, self)._handle_exception(exception, code)
else:
logger.info("Redirect to %s" % (url_request))
return request.redirect(url_request, code=302)
return super(ir_http, self)._handle_exception(exception, code)
|
<commit_before># -*- coding: utf-8 -*-
import logging
import urllib2
from openerp.http import request
from openerp.osv import orm
logger = logging.getLogger(__name__)
class ir_http(orm.AbstractModel):
_inherit = 'ir.http'
def _handle_exception(self, exception, code=500):
code = getattr(exception, 'code', code)
if code == 404:
page = request.httprequest.path
logger.warning("404 code... %s" % (page))
url = request.registry['ir.config_parameter'].get_param(request.cr,
request.uid, 'website.notfound_redirect_url')
if url:
url_request = "%s%s" % (url, page)
logger.info("The redirect url: %s" % (url_request))
try:
req = urllib2.Request(url_request)
request_old = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
request_old = False
else:
request_old = False
if not request_old:
logger.warning("URL not found: %s" % (url_request))
return super(ir_http, self)._handle_exception(exception, code)
else:
logger.warning("Redirect to %s" % (url_request))
return request.redirect(url_request, code=302)
return super(ir_http, self)._handle_exception(exception, code)
<commit_msg>Change logger messages to info<commit_after># -*- coding: utf-8 -*-
import logging
import urllib2
from openerp.http import request
from openerp.osv import orm
logger = logging.getLogger(__name__)
class ir_http(orm.AbstractModel):
_inherit = 'ir.http'
def _handle_exception(self, exception, code=500):
code = getattr(exception, 'code', code)
if code == 404:
page = request.httprequest.path
logger.info("404 code... %s" % (page))
url = request.registry['ir.config_parameter'].get_param(request.cr,
request.uid, 'website.notfound_redirect_url')
if url:
url_request = "%s%s" % (url, page)
logger.info("The redirect url: %s" % (url_request))
try:
req = urllib2.Request(url_request)
request_old = urllib2.urlopen(req)
except (urllib2.HTTPError, urllib2.URLError):
request_old = False
else:
request_old = False
if not request_old:
logger.info("URL not found: %s" % (url_request))
return super(ir_http, self)._handle_exception(exception, code)
else:
logger.info("Redirect to %s" % (url_request))
return request.redirect(url_request, code=302)
return super(ir_http, self)._handle_exception(exception, code)
|
8af3aef367135dbbc55e573c6a943a86ff3ccd9d
|
survey/tests/locale/test_locale_normalization.py
|
survey/tests/locale/test_locale_normalization.py
|
import os
import platform
import subprocess
import unittest
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = "survey/locale/"
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
|
import os
import platform
import subprocess
import unittest
from pathlib import Path
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = Path("survey", "locale").absolute()
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
|
Use an absolute Path for localization tests
|
Use an absolute Path for localization tests
|
Python
|
agpl-3.0
|
Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey
|
import os
import platform
import subprocess
import unittest
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = "survey/locale/"
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
Use an absolute Path for localization tests
|
import os
import platform
import subprocess
import unittest
from pathlib import Path


class TestLocaleNormalization(unittest.TestCase):
    # Absolute path so the test behaves the same regardless of the
    # working directory pytest/unittest was launched from.
    LOCALE_PATH = Path("survey", "locale").absolute()

    def test_normalization(self):
        """ We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
        # Windows ships the `py` launcher; elsewhere `python3` is on PATH.
        if platform.system() == "Windows":
            python_3 = ["py", "-3"]
        else:
            python_3 = ["python3"]
        makemessages_command = python_3 + [
            "manage.py",
            "makemessages",
            "--no-obsolete",
            "--no-wrap",
            "--ignore",
            "venv",
        ]
        number_of_language = len(os.listdir(self.LOCALE_PATH))
        subprocess.check_call(makemessages_command)
        git_diff_command = ["git", "diff", self.LOCALE_PATH]
        git_diff = subprocess.check_output(git_diff_command).decode("utf8")
        # In the diff we should have a change only for the date of the generation
        # So 2 * @@ * number of language
        number_of_change = git_diff.count("@@") / 2
        # Fix: the previous version had a trailing comma after .format(...),
        # which silently turned `msg` into a 1-tuple, so assertEqual printed
        # the failure message as `('...',)`.
        msg = (
            "You did not update the translation following your changes. Maybe you did not use the "
            "normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
            "working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH)
        )
        self.assertEqual(number_of_change, number_of_language, msg)
|
<commit_before>import os
import platform
import subprocess
import unittest
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = "survey/locale/"
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
<commit_msg>Use an absolute Path for localization tests<commit_after>
|
import os
import platform
import subprocess
import unittest
from pathlib import Path
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = Path("survey", "locale").absolute()
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
|
import os
import platform
import subprocess
import unittest
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = "survey/locale/"
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
Use an absolute Path for localization testsimport os
import platform
import subprocess
import unittest
from pathlib import Path
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = Path("survey", "locale").absolute()
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
|
<commit_before>import os
import platform
import subprocess
import unittest
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = "survey/locale/"
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
<commit_msg>Use an absolute Path for localization tests<commit_after>import os
import platform
import subprocess
import unittest
from pathlib import Path
class TestLocaleNormalization(unittest.TestCase):
LOCALE_PATH = Path("survey", "locale").absolute()
def test_normalization(self):
""" We test if the messages were properly created with makemessages --no-obsolete --no-wrap. """
if platform.system() == "Windows":
python_3 = ["py", "-3"]
else:
python_3 = ["python3"]
makemessages_command = python_3 + [
"manage.py",
"makemessages",
"--no-obsolete",
"--no-wrap",
"--ignore",
"venv",
]
number_of_language = len(os.listdir(self.LOCALE_PATH))
subprocess.check_call(makemessages_command)
git_diff_command = ["git", "diff", self.LOCALE_PATH]
git_diff = subprocess.check_output(git_diff_command).decode("utf8")
# In the diff we should have a change only for the date of the generation
# So 2 * @@ * number of language
number_of_change = git_diff.count("@@") / 2
msg = (
"You did not update the translation following your changes. Maybe you did not use the "
"normalized 'python3 manage.py makemessages --no-obsolete --no-wrap' ? If you're "
"working locally, just use 'git add {}', we launched it during tests.".format(self.LOCALE_PATH),
)
self.assertEqual(number_of_change, number_of_language, msg)
|
7a174e05108b673ae3e6a7b259ee8992b764e973
|
lintreview/tools/yamllint.py
|
lintreview/tools/yamllint.py
|
import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command
from lintreview.utils import in_path
# Module-level logger shared by this tool.
log = logging.getLogger(__name__)
class Yamllint(Tool):
    # Lints *.yml / *.yaml files by shelling out to the external
    # `yamllint` executable and collecting its parsable output.
    name = 'yamllint'
    def check_dependencies(self):
        """
        See if yamllint is on the PATH
        """
        return in_path('yamllint')
    def match_file(self, filename):
        """Return True for files with a .yml or .yaml extension."""
        base = os.path.basename(filename)
        name, ext = os.path.splitext(base)
        return ext in ['.yml', '.yaml']
    def process_files(self, files):
        """
        Run code checks with yamllint.
        Only a single process is made for all files
        to save resources.
        Configuration is not supported at this time
        """
        log.debug('Processing %s files with %s', files, self.name)
        command = ['yamllint', '--format=parsable']
        # Add config file if its present
        if self.options.get('config'):
            command += ['-c', self.apply_base(self.options['config'])]
        command += files
        output = run_command(command, split=True, ignore_error=True)
        if not output:
            log.debug('No yamllint errors found.')
            return False
        # One problem entry per parsable output line.
        for line in output:
            filename, line, error = self._parse_line(line)
            self.problems.add(filename, line, error)
    def _parse_line(self, line):
        """
        yamllint only generates results as stdout.
        Parse the output for real data.
        """
        # Assumes parsable lines look like `file:line[:col]: message`;
        # the split limit keeps colons inside the message intact.
        # TODO(review): confirm both 3- and 4-part forms against actual
        # yamllint output.
        parts = line.split(':', 3)
        if len(parts) == 3:
            # No column segment present: `file:line: message`.
            message = parts[2].strip()
        else:
            message = parts[3].strip()
        return (parts[0], int(parts[1]), message)
|
import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command, process_quickfix
from lintreview.utils import in_path
# Module-level logger shared by this tool.
log = logging.getLogger(__name__)
class Yamllint(Tool):
    # Lints *.yml / *.yaml files by shelling out to the external
    # `yamllint` executable and collecting its parsable output.
    name = 'yamllint'
    def check_dependencies(self):
        """
        See if yamllint is on the PATH
        """
        return in_path('yamllint')
    def match_file(self, filename):
        """Return True for files with a .yml or .yaml extension."""
        base = os.path.basename(filename)
        name, ext = os.path.splitext(base)
        return ext in ['.yml', '.yaml']
    def process_files(self, files):
        """
        Run code checks with yamllint.
        Only a single process is made for all files
        to save resources.
        Configuration is not supported at this time
        """
        log.debug('Processing %s files with %s', files, self.name)
        command = ['yamllint', '--format=parsable']
        # Add config file if its present
        if self.options.get('config'):
            command += ['-c', self.apply_base(self.options['config'])]
        command += files
        output = run_command(command, split=True, ignore_error=True)
        if not output:
            log.debug('No yamllint errors found.')
            return False
        # Delegate line parsing to the shared quickfix parser; the identity
        # lambda passes each output line through unmodified (presumably the
        # parsable format matches the common quickfix shape — verify in
        # lintreview.tools.process_quickfix).
        process_quickfix(self.problems, output, lambda x: x)
|
Use more robust quickfix parser.
|
Use more robust quickfix parser.
The yamllint implementation had some issues that the common code does
not.
|
Python
|
mit
|
markstory/lint-review,markstory/lint-review,markstory/lint-review
|
import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Yamllint(Tool):
name = 'yamllint'
def check_dependencies(self):
"""
See if yamllint is on the PATH
"""
return in_path('yamllint')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext in ['.yml', '.yaml']
def process_files(self, files):
"""
Run code checks with yamllint.
Only a single process is made for all files
to save resources.
Configuration is not supported at this time
"""
log.debug('Processing %s files with %s', files, self.name)
command = ['yamllint', '--format=parsable']
# Add config file if its present
if self.options.get('config'):
command += ['-c', self.apply_base(self.options['config'])]
command += files
output = run_command(command, split=True, ignore_error=True)
if not output:
log.debug('No yamllint errors found.')
return False
for line in output:
filename, line, error = self._parse_line(line)
self.problems.add(filename, line, error)
def _parse_line(self, line):
"""
yamllint only generates results as stdout.
Parse the output for real data.
"""
parts = line.split(':', 3)
if len(parts) == 3:
message = parts[2].strip()
else:
message = parts[3].strip()
return (parts[0], int(parts[1]), message)
Use more robust quickfix parser.
The yamllint implementation had some issues that the common code does
not.
|
import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command, process_quickfix
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Yamllint(Tool):
name = 'yamllint'
def check_dependencies(self):
"""
See if yamllint is on the PATH
"""
return in_path('yamllint')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext in ['.yml', '.yaml']
def process_files(self, files):
"""
Run code checks with yamllint.
Only a single process is made for all files
to save resources.
Configuration is not supported at this time
"""
log.debug('Processing %s files with %s', files, self.name)
command = ['yamllint', '--format=parsable']
# Add config file if its present
if self.options.get('config'):
command += ['-c', self.apply_base(self.options['config'])]
command += files
output = run_command(command, split=True, ignore_error=True)
if not output:
log.debug('No yamllint errors found.')
return False
process_quickfix(self.problems, output, lambda x: x)
|
<commit_before>import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Yamllint(Tool):
name = 'yamllint'
def check_dependencies(self):
"""
See if yamllint is on the PATH
"""
return in_path('yamllint')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext in ['.yml', '.yaml']
def process_files(self, files):
"""
Run code checks with yamllint.
Only a single process is made for all files
to save resources.
Configuration is not supported at this time
"""
log.debug('Processing %s files with %s', files, self.name)
command = ['yamllint', '--format=parsable']
# Add config file if its present
if self.options.get('config'):
command += ['-c', self.apply_base(self.options['config'])]
command += files
output = run_command(command, split=True, ignore_error=True)
if not output:
log.debug('No yamllint errors found.')
return False
for line in output:
filename, line, error = self._parse_line(line)
self.problems.add(filename, line, error)
def _parse_line(self, line):
"""
yamllint only generates results as stdout.
Parse the output for real data.
"""
parts = line.split(':', 3)
if len(parts) == 3:
message = parts[2].strip()
else:
message = parts[3].strip()
return (parts[0], int(parts[1]), message)
<commit_msg>Use more robust quickfix parser.
The yamllint implementation had some issues that the common code does
not.<commit_after>
|
import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command, process_quickfix
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Yamllint(Tool):
name = 'yamllint'
def check_dependencies(self):
"""
See if yamllint is on the PATH
"""
return in_path('yamllint')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext in ['.yml', '.yaml']
def process_files(self, files):
"""
Run code checks with yamllint.
Only a single process is made for all files
to save resources.
Configuration is not supported at this time
"""
log.debug('Processing %s files with %s', files, self.name)
command = ['yamllint', '--format=parsable']
# Add config file if its present
if self.options.get('config'):
command += ['-c', self.apply_base(self.options['config'])]
command += files
output = run_command(command, split=True, ignore_error=True)
if not output:
log.debug('No yamllint errors found.')
return False
process_quickfix(self.problems, output, lambda x: x)
|
import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Yamllint(Tool):
name = 'yamllint'
def check_dependencies(self):
"""
See if yamllint is on the PATH
"""
return in_path('yamllint')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext in ['.yml', '.yaml']
def process_files(self, files):
"""
Run code checks with yamllint.
Only a single process is made for all files
to save resources.
Configuration is not supported at this time
"""
log.debug('Processing %s files with %s', files, self.name)
command = ['yamllint', '--format=parsable']
# Add config file if its present
if self.options.get('config'):
command += ['-c', self.apply_base(self.options['config'])]
command += files
output = run_command(command, split=True, ignore_error=True)
if not output:
log.debug('No yamllint errors found.')
return False
for line in output:
filename, line, error = self._parse_line(line)
self.problems.add(filename, line, error)
def _parse_line(self, line):
"""
yamllint only generates results as stdout.
Parse the output for real data.
"""
parts = line.split(':', 3)
if len(parts) == 3:
message = parts[2].strip()
else:
message = parts[3].strip()
return (parts[0], int(parts[1]), message)
Use more robust quickfix parser.
The yamllint implementation had some issues that the common code does
not.import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command, process_quickfix
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Yamllint(Tool):
name = 'yamllint'
def check_dependencies(self):
"""
See if yamllint is on the PATH
"""
return in_path('yamllint')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext in ['.yml', '.yaml']
def process_files(self, files):
"""
Run code checks with yamllint.
Only a single process is made for all files
to save resources.
Configuration is not supported at this time
"""
log.debug('Processing %s files with %s', files, self.name)
command = ['yamllint', '--format=parsable']
# Add config file if its present
if self.options.get('config'):
command += ['-c', self.apply_base(self.options['config'])]
command += files
output = run_command(command, split=True, ignore_error=True)
if not output:
log.debug('No yamllint errors found.')
return False
process_quickfix(self.problems, output, lambda x: x)
|
<commit_before>import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Yamllint(Tool):
name = 'yamllint'
def check_dependencies(self):
"""
See if yamllint is on the PATH
"""
return in_path('yamllint')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext in ['.yml', '.yaml']
def process_files(self, files):
"""
Run code checks with yamllint.
Only a single process is made for all files
to save resources.
Configuration is not supported at this time
"""
log.debug('Processing %s files with %s', files, self.name)
command = ['yamllint', '--format=parsable']
# Add config file if its present
if self.options.get('config'):
command += ['-c', self.apply_base(self.options['config'])]
command += files
output = run_command(command, split=True, ignore_error=True)
if not output:
log.debug('No yamllint errors found.')
return False
for line in output:
filename, line, error = self._parse_line(line)
self.problems.add(filename, line, error)
def _parse_line(self, line):
"""
yamllint only generates results as stdout.
Parse the output for real data.
"""
parts = line.split(':', 3)
if len(parts) == 3:
message = parts[2].strip()
else:
message = parts[3].strip()
return (parts[0], int(parts[1]), message)
<commit_msg>Use more robust quickfix parser.
The yamllint implementation had some issues that the common code does
not.<commit_after>import os
import logging
from lintreview.tools import Tool
from lintreview.tools import run_command, process_quickfix
from lintreview.utils import in_path
log = logging.getLogger(__name__)
class Yamllint(Tool):
name = 'yamllint'
def check_dependencies(self):
"""
See if yamllint is on the PATH
"""
return in_path('yamllint')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext in ['.yml', '.yaml']
def process_files(self, files):
"""
Run code checks with yamllint.
Only a single process is made for all files
to save resources.
Configuration is not supported at this time
"""
log.debug('Processing %s files with %s', files, self.name)
command = ['yamllint', '--format=parsable']
# Add config file if its present
if self.options.get('config'):
command += ['-c', self.apply_base(self.options['config'])]
command += files
output = run_command(command, split=True, ignore_error=True)
if not output:
log.debug('No yamllint errors found.')
return False
process_quickfix(self.problems, output, lambda x: x)
|
9c9e564d51d44fb27101249d57d769828f14e97e
|
tests/integration/modules/test_win_dns_client.py
|
tests/integration/modules/test_win_dns_client.py
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test add and removing a dns server
'''
dns = '8.8.8.8'
interface = 'Ethernet'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
    '''
    Test for salt.modules.win_dns_client
    '''
    @destructiveTest
    def test_add_remove_dns(self):
        '''
        Test add and removing a dns server
        '''
        # Get a list of interfaces on the system
        interfaces = self.run_function('network.interfaces_names')
        # BUG FIX: the original used `skipIf(interfaces.count == 0, ...)`.
        # `interfaces.count` is the bound list method, which is never equal
        # to 0, and calling skipIf() at runtime only returns a decorator
        # that was discarded — so the test was never skipped. Use a
        # truthiness check and self.skipTest() instead.
        if not interfaces:
            self.skipTest('This test requires a network interface')
        interface = interfaces[0]

        dns = '8.8.8.8'
        # add dns server
        self.assertTrue(self.run_function(
            'win_dns_client.add_dns', [dns, interface], index=42))

        srvs = self.run_function(
            'win_dns_client.get_dns_servers', interface=interface)
        self.assertIn(dns, srvs)

        # remove dns server
        self.assertTrue(self.run_function(
            'win_dns_client.rm_dns', [dns], interface=interface))

        srvs = self.run_function(
            'win_dns_client.get_dns_servers', interface=interface)
        self.assertNotIn(dns, srvs)
|
Fix the failing dns test on Windows
|
Fix the failing dns test on Windows
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test add and removing a dns server
'''
dns = '8.8.8.8'
interface = 'Ethernet'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
Fix the failing dns test on Windows
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test add and removing a dns server
'''
# Get a list of interfaces on the system
interfaces = self.run_function('network.interfaces_names')
skipIf(interfaces.count == 0, 'This test requires a network interface')
interface = interfaces[0]
dns = '8.8.8.8'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
|
<commit_before># -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test add and removing a dns server
'''
dns = '8.8.8.8'
interface = 'Ethernet'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
<commit_msg>Fix the failing dns test on Windows<commit_after>
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test add and removing a dns server
'''
# Get a list of interfaces on the system
interfaces = self.run_function('network.interfaces_names')
skipIf(interfaces.count == 0, 'This test requires a network interface')
interface = interfaces[0]
dns = '8.8.8.8'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
|
# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test add and removing a dns server
'''
dns = '8.8.8.8'
interface = 'Ethernet'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
Fix the failing dns test on Windows# -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test add and removing a dns server
'''
# Get a list of interfaces on the system
interfaces = self.run_function('network.interfaces_names')
skipIf(interfaces.count == 0, 'This test requires a network interface')
interface = interfaces[0]
dns = '8.8.8.8'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
|
<commit_before># -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test add and removing a dns server
'''
dns = '8.8.8.8'
interface = 'Ethernet'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
<commit_msg>Fix the failing dns test on Windows<commit_after># -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.helpers import destructiveTest
# Import Salt libs
import salt.utils.platform
@skipIf(not salt.utils.platform.is_windows(), 'windows test only')
class WinDNSTest(ModuleCase):
'''
Test for salt.modules.win_dns_client
'''
@destructiveTest
def test_add_remove_dns(self):
'''
Test add and removing a dns server
'''
# Get a list of interfaces on the system
interfaces = self.run_function('network.interfaces_names')
skipIf(interfaces.count == 0, 'This test requires a network interface')
interface = interfaces[0]
dns = '8.8.8.8'
# add dns server
self.assertTrue(self.run_function('win_dns_client.add_dns', [dns, interface], index=42))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertIn(dns, srvs)
# remove dns server
self.assertTrue(self.run_function('win_dns_client.rm_dns', [dns], interface=interface))
srvs = self.run_function('win_dns_client.get_dns_servers', interface=interface)
self.assertNotIn(dns, srvs)
|
6f3336ef5dd43c02c851001715cf0f231c269276
|
pyramid_keystone/__init__.py
|
pyramid_keystone/__init__.py
|
default_settings = [
('auth_url', str, 'http://localhost:5000/v3'),
('region', str, 'RegionOne'),
('user_domain_name', str, 'Default'),
('cacert', str, ''),
]
def parse_settings(settings):
parsed = {}
def populate(name, convert, default):
sname = '%s%s' % ('keystone.', name)
value = convert(settings.get(sname, default))
parsed[sname] = value
for name, convert, default in default_settings:
populate(name, convert, default)
return parsed
def includeme(config):
""" Set up standard configurator registrations. Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
config.action('keystone-configure', register)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
|
# (name, converter, default) triples describing every supported
# ``keystone.``-prefixed configuration setting.
default_settings = [
    ('auth_url', str, 'http://localhost:5000/v3'),
    ('region', str, 'RegionOne'),
    ('user_domain_name', str, 'Default'),
    ('cacert', str, ''),
]


def parse_settings(settings):
    """Return the keystone settings from *settings*, converted and defaulted."""
    parsed = {}
    for name, convert, default in default_settings:
        key = '%s%s' % ('keystone.', name)
        parsed[key] = convert(settings.get(key, default))
    return parsed


def includeme(config):
    """ Set up standard configurator registrations. Use via:

    .. code-block:: python

        config = Configurator()
        config.include('pyramid_keystone')
    """
    # We use an action so that the user can include us, and then add the
    # required variables, upon commit we will pick up those changes.
    def register():
        registry = config.registry
        registry.settings.update(parse_settings(registry.settings))

    config.action('keystone-configure', register)
    # Allow the user to use our auth policy (recommended)
    config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
    # Add the keystone property to the request
    config.add_request_method('.keystone.request_keystone', name='keystone',
                              property=True, reify=True)
|
Add keystone to the request
|
Add keystone to the request
|
Python
|
isc
|
bertjwregeer/pyramid_keystone
|
default_settings = [
('auth_url', str, 'http://localhost:5000/v3'),
('region', str, 'RegionOne'),
('user_domain_name', str, 'Default'),
('cacert', str, ''),
]
def parse_settings(settings):
parsed = {}
def populate(name, convert, default):
sname = '%s%s' % ('keystone.', name)
value = convert(settings.get(sname, default))
parsed[sname] = value
for name, convert, default in default_settings:
populate(name, convert, default)
return parsed
def includeme(config):
""" Set up standard configurator registrations. Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
config.action('keystone-configure', register)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
Add keystone to the request
|
default_settings = [
('auth_url', str, 'http://localhost:5000/v3'),
('region', str, 'RegionOne'),
('user_domain_name', str, 'Default'),
('cacert', str, ''),
]
def parse_settings(settings):
parsed = {}
def populate(name, convert, default):
sname = '%s%s' % ('keystone.', name)
value = convert(settings.get(sname, default))
parsed[sname] = value
for name, convert, default in default_settings:
populate(name, convert, default)
return parsed
def includeme(config):
""" Set up standard configurator registrations. Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
config.action('keystone-configure', register)
# Allow the user to use our auth policy (recommended)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
# Add the keystone property to the request
config.add_request_method('.keystone.request_keystone', name='keystone', property=True, reify=True)
|
<commit_before>
default_settings = [
('auth_url', str, 'http://localhost:5000/v3'),
('region', str, 'RegionOne'),
('user_domain_name', str, 'Default'),
('cacert', str, ''),
]
def parse_settings(settings):
parsed = {}
def populate(name, convert, default):
sname = '%s%s' % ('keystone.', name)
value = convert(settings.get(sname, default))
parsed[sname] = value
for name, convert, default in default_settings:
populate(name, convert, default)
return parsed
def includeme(config):
""" Set up standard configurator registrations. Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
config.action('keystone-configure', register)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
<commit_msg>Add keystone to the request<commit_after>
|
default_settings = [
('auth_url', str, 'http://localhost:5000/v3'),
('region', str, 'RegionOne'),
('user_domain_name', str, 'Default'),
('cacert', str, ''),
]
def parse_settings(settings):
parsed = {}
def populate(name, convert, default):
sname = '%s%s' % ('keystone.', name)
value = convert(settings.get(sname, default))
parsed[sname] = value
for name, convert, default in default_settings:
populate(name, convert, default)
return parsed
def includeme(config):
""" Set up standard configurator registrations. Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
config.action('keystone-configure', register)
# Allow the user to use our auth policy (recommended)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
# Add the keystone property to the request
config.add_request_method('.keystone.request_keystone', name='keystone', property=True, reify=True)
|
default_settings = [
('auth_url', str, 'http://localhost:5000/v3'),
('region', str, 'RegionOne'),
('user_domain_name', str, 'Default'),
('cacert', str, ''),
]
def parse_settings(settings):
parsed = {}
def populate(name, convert, default):
sname = '%s%s' % ('keystone.', name)
value = convert(settings.get(sname, default))
parsed[sname] = value
for name, convert, default in default_settings:
populate(name, convert, default)
return parsed
def includeme(config):
""" Set up standard configurator registrations. Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
config.action('keystone-configure', register)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
Add keystone to the request
default_settings = [
('auth_url', str, 'http://localhost:5000/v3'),
('region', str, 'RegionOne'),
('user_domain_name', str, 'Default'),
('cacert', str, ''),
]
def parse_settings(settings):
parsed = {}
def populate(name, convert, default):
sname = '%s%s' % ('keystone.', name)
value = convert(settings.get(sname, default))
parsed[sname] = value
for name, convert, default in default_settings:
populate(name, convert, default)
return parsed
def includeme(config):
""" Set up standard configurator registrations. Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
config.action('keystone-configure', register)
# Allow the user to use our auth policy (recommended)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
# Add the keystone property to the request
config.add_request_method('.keystone.request_keystone', name='keystone', property=True, reify=True)
|
<commit_before>
default_settings = [
('auth_url', str, 'http://localhost:5000/v3'),
('region', str, 'RegionOne'),
('user_domain_name', str, 'Default'),
('cacert', str, ''),
]
def parse_settings(settings):
parsed = {}
def populate(name, convert, default):
sname = '%s%s' % ('keystone.', name)
value = convert(settings.get(sname, default))
parsed[sname] = value
for name, convert, default in default_settings:
populate(name, convert, default)
return parsed
def includeme(config):
""" Set up standard configurator registrations. Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
config.action('keystone-configure', register)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
<commit_msg>Add keystone to the request<commit_after>
default_settings = [
('auth_url', str, 'http://localhost:5000/v3'),
('region', str, 'RegionOne'),
('user_domain_name', str, 'Default'),
('cacert', str, ''),
]
def parse_settings(settings):
parsed = {}
def populate(name, convert, default):
sname = '%s%s' % ('keystone.', name)
value = convert(settings.get(sname, default))
parsed[sname] = value
for name, convert, default in default_settings:
populate(name, convert, default)
return parsed
def includeme(config):
""" Set up standard configurator registrations. Use via:
.. code-block:: python
config = Configurator()
config.include('pyramid_keystone')
"""
# We use an action so that the user can include us, and then add the
# required variables, upon commit we will pick up those changes.
def register():
registry = config.registry
settings = parse_settings(registry.settings)
registry.settings.update(settings)
config.action('keystone-configure', register)
# Allow the user to use our auth policy (recommended)
config.add_directive('keystone_auth_policy', '.authentication.add_auth_policy')
# Add the keystone property to the request
config.add_request_method('.keystone.request_keystone', name='keystone', property=True, reify=True)
|
8a4819daa627f06e1a0eac87ab44176b7e2a0115
|
openerp/addons/openupgrade_records/lib/apriori.py
|
openerp/addons/openupgrade_records/lib/apriori.py
|
""" Encode any known changes to the database here
to help the matching process
"""
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'bank_statement_parse_camt',
'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
}
renamed_models = {
}
|
""" Encode any known changes to the database here
to help the matching process
"""
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'account_bank_statement_import_camt',
'account_banking_mt940':
'account_bank_statement_import_mt940_base',
'account_banking_nl_ing_mt940':
'account_bank_statement_import_mt940_nl_ing',
'account_banking_nl_rabo_mt940':
'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
}
|
Correct renamed module names for bank-statement-import repository.
|
[FIX] Correct renamed module names for bank-statement-import repository.
|
Python
|
agpl-3.0
|
OpenUpgrade/OpenUpgrade,grap/OpenUpgrade,grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,grap/OpenUpgrade,OpenUpgrade/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade
|
""" Encode any known changes to the database here
to help the matching process
"""
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'bank_statement_parse_camt',
'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
}
renamed_models = {
}
[FIX] Correct renamed module names for bank-statement-import repository.
|
""" Encode any known changes to the database here
to help the matching process
"""
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'account_bank_statement_import_camt',
'account_banking_mt940':
'account_bank_statement_import_mt940_base',
'account_banking_nl_ing_mt940':
'account_bank_statement_import_mt940_nl_ing',
'account_banking_nl_rabo_mt940':
'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
}
|
<commit_before>""" Encode any known changes to the database here
to help the matching process
"""
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'bank_statement_parse_camt',
'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
}
renamed_models = {
}
<commit_msg>[FIX] Correct renamed module names for bank-statement-import repository.<commit_after>
|
""" Encode any known changes to the database here
to help the matching process
"""
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'account_bank_statement_import_camt',
'account_banking_mt940':
'account_bank_statement_import_mt940_base',
'account_banking_nl_ing_mt940':
'account_bank_statement_import_mt940_nl_ing',
'account_banking_nl_rabo_mt940':
'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
}
|
""" Encode any known changes to the database here
to help the matching process
"""
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'bank_statement_parse_camt',
'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
}
renamed_models = {
}
[FIX] Correct renamed module names for bank-statement-import repository.""" Encode any known changes to the database here
to help the matching process
"""
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'account_bank_statement_import_camt',
'account_banking_mt940':
'account_bank_statement_import_mt940_base',
'account_banking_nl_ing_mt940':
'account_bank_statement_import_mt940_nl_ing',
'account_banking_nl_rabo_mt940':
'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
}
|
<commit_before>""" Encode any known changes to the database here
to help the matching process
"""
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'bank_statement_parse_camt',
'account_banking_nl_ing_mt940': 'bank_statement_parse_nl_ing_mt940',
'account_banking_nl_rabo_mt940': 'bank_statement_parse_nl_rabo_mt940',
}
renamed_models = {
}
<commit_msg>[FIX] Correct renamed module names for bank-statement-import repository.<commit_after>""" Encode any known changes to the database here
to help the matching process
"""
renamed_modules = {
'base_calendar': 'calendar',
'mrp_jit': 'procurement_jit',
'project_mrp': 'sale_service',
# OCA/account-invoicing
'invoice_validation_wkfl': 'account_invoice_validation_workflow',
'account_invoice_zero': 'account_invoice_zero_autopay',
# OCA/server-tools
'audittrail': 'auditlog',
# OCA/bank-statement-import
'account_banking': 'account_bank_statement_import',
'account_banking_camt': 'account_bank_statement_import_camt',
'account_banking_mt940':
'account_bank_statement_import_mt940_base',
'account_banking_nl_ing_mt940':
'account_bank_statement_import_mt940_nl_ing',
'account_banking_nl_rabo_mt940':
'account_bank_statement_import_mt940_nl_rabo',
}
renamed_models = {
}
|
3d48066c78d693b89cb2daabfd1ebe756862edc5
|
mopidy_gmusic/__init__.py
|
mopidy_gmusic/__init__.py
|
from __future__ import unicode_literals
import os
from mopidy import config, exceptions, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def validate_environment(self):
try:
import gmusicapi # noqa
except ImportError as e:
raise exceptions.ExtensionError('gmusicapi library not found', e)
pass
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
|
from __future__ import unicode_literals
import os
from mopidy import config, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
|
Remove dependency check done by Mopidy
|
Remove dependency check done by Mopidy
|
Python
|
apache-2.0
|
hechtus/mopidy-gmusic,jaapz/mopidy-gmusic,Tilley/mopidy-gmusic,elrosti/mopidy-gmusic,jodal/mopidy-gmusic,jaibot/mopidy-gmusic,mopidy/mopidy-gmusic
|
from __future__ import unicode_literals
import os
from mopidy import config, exceptions, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def validate_environment(self):
try:
import gmusicapi # noqa
except ImportError as e:
raise exceptions.ExtensionError('gmusicapi library not found', e)
pass
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
Remove dependency check done by Mopidy
|
from __future__ import unicode_literals
import os
from mopidy import config, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
|
<commit_before>from __future__ import unicode_literals
import os
from mopidy import config, exceptions, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def validate_environment(self):
try:
import gmusicapi # noqa
except ImportError as e:
raise exceptions.ExtensionError('gmusicapi library not found', e)
pass
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
<commit_msg>Remove dependency check done by Mopidy<commit_after>
|
from __future__ import unicode_literals
import os
from mopidy import config, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
|
from __future__ import unicode_literals
import os
from mopidy import config, exceptions, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def validate_environment(self):
try:
import gmusicapi # noqa
except ImportError as e:
raise exceptions.ExtensionError('gmusicapi library not found', e)
pass
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
Remove dependency check done by Mopidyfrom __future__ import unicode_literals
import os
from mopidy import config, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
|
<commit_before>from __future__ import unicode_literals
import os
from mopidy import config, exceptions, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def validate_environment(self):
try:
import gmusicapi # noqa
except ImportError as e:
raise exceptions.ExtensionError('gmusicapi library not found', e)
pass
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
<commit_msg>Remove dependency check done by Mopidy<commit_after>from __future__ import unicode_literals
import os
from mopidy import config, ext
__version__ = '0.2.2'
class GMusicExtension(ext.Extension):
dist_name = 'Mopidy-GMusic'
ext_name = 'gmusic'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(GMusicExtension, self).get_config_schema()
schema['username'] = config.String()
schema['password'] = config.Secret()
schema['deviceid'] = config.String(optional=True)
return schema
def get_backend_classes(self):
from .actor import GMusicBackend
return [GMusicBackend]
|
e7bf5e84629daffd2a625759addf4eea8423e115
|
dataportal/broker/__init__.py
|
dataportal/broker/__init__.py
|
from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton, used by pims_readers import below
from .pims_readers import Images, SubtractedImages
register_builtin_handlers()
|
from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError, fill_event)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton, used by pims_readers import below
from .pims_readers import Images, SubtractedImages
register_builtin_handlers()
|
Put fill_event in the public API.
|
API: Put fill_event in the public API.
|
Python
|
bsd-3-clause
|
NSLS-II/dataportal,NSLS-II/dataportal,danielballan/datamuxer,danielballan/dataportal,ericdill/datamuxer,danielballan/datamuxer,NSLS-II/datamuxer,ericdill/databroker,tacaswell/dataportal,ericdill/datamuxer,ericdill/databroker,danielballan/dataportal,tacaswell/dataportal
|
from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton, used by pims_readers import below
from .pims_readers import Images, SubtractedImages
register_builtin_handlers()
API: Put fill_event in the public API.
|
from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError, fill_event)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton, used by pims_readers import below
from .pims_readers import Images, SubtractedImages
register_builtin_handlers()
|
<commit_before>from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton, used by pims_readers import below
from .pims_readers import Images, SubtractedImages
register_builtin_handlers()
<commit_msg>API: Put fill_event in the public API.<commit_after>
|
from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError, fill_event)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton, used by pims_readers import below
from .pims_readers import Images, SubtractedImages
register_builtin_handlers()
|
from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton, used by pims_readers import below
from .pims_readers import Images, SubtractedImages
register_builtin_handlers()
API: Put fill_event in the public API.from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError, fill_event)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton, used by pims_readers import below
from .pims_readers import Images, SubtractedImages
register_builtin_handlers()
|
<commit_before>from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton, used by pims_readers import below
from .pims_readers import Images, SubtractedImages
register_builtin_handlers()
<commit_msg>API: Put fill_event in the public API.<commit_after>from .simple_broker import (_DataBrokerClass, EventQueue, Header,
LocationError, IntegrityError, fill_event)
from .handler_registration import register_builtin_handlers
DataBroker = _DataBrokerClass() # singleton, used by pims_readers import below
from .pims_readers import Images, SubtractedImages
register_builtin_handlers()
|
75726945934a049c9fc81066996f1670f29ead2c
|
test/long_test.py
|
test/long_test.py
|
import os, unittest
"""This module long_test provides a decorator, @long_test, that you can use to
mark tests which take a lot of wall clock time.
If the system environment variable SKIP_LONG_TESTS is set, tests decorated
with @long_test will not be run.
"""
SKIP_LONG_TESTS = os.getenv('SKIP_LONG_TESTS', None) is not None
long_test = unittest.skipIf(SKIP_LONG_TESTS, 'Long tests skipped.')
|
import os, unittest
"""This module long_test provides a decorator, @long_test, that you can use to
mark tests which take a lot of wall clock time.
If the system environment variable SKIP_LONG_TESTS is set, tests decorated
with @long_test will not be run.
"""
SKIP_LONG_TESTS = os.getenv('SKIP_LONG_TESTS', '').lower().startswith('t')
long_test = unittest.skipIf(SKIP_LONG_TESTS, 'Long tests skipped.')
|
Improve handling of SKIP_LONG_TESTS build variable.
|
Improve handling of SKIP_LONG_TESTS build variable.
|
Python
|
mit
|
rec/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel
|
import os, unittest
"""This module long_test provides a decorator, @long_test, that you can use to
mark tests which take a lot of wall clock time.
If the system environment variable SKIP_LONG_TESTS is set, tests decorated
with @long_test will not be run.
"""
SKIP_LONG_TESTS = os.getenv('SKIP_LONG_TESTS', None) is not None
long_test = unittest.skipIf(SKIP_LONG_TESTS, 'Long tests skipped.')
Improve handling of SKIP_LONG_TESTS build variable.
|
import os, unittest
"""This module long_test provides a decorator, @long_test, that you can use to
mark tests which take a lot of wall clock time.
If the system environment variable SKIP_LONG_TESTS is set, tests decorated
with @long_test will not be run.
"""
SKIP_LONG_TESTS = os.getenv('SKIP_LONG_TESTS', '').lower().startswith('t')
long_test = unittest.skipIf(SKIP_LONG_TESTS, 'Long tests skipped.')
|
<commit_before>import os, unittest
"""This module long_test provides a decorator, @long_test, that you can use to
mark tests which take a lot of wall clock time.
If the system environment variable SKIP_LONG_TESTS is set, tests decorated
with @long_test will not be run.
"""
SKIP_LONG_TESTS = os.getenv('SKIP_LONG_TESTS', None) is not None
long_test = unittest.skipIf(SKIP_LONG_TESTS, 'Long tests skipped.')
<commit_msg>Improve handling of SKIP_LONG_TESTS build variable.<commit_after>
|
import os, unittest
"""This module long_test provides a decorator, @long_test, that you can use to
mark tests which take a lot of wall clock time.
If the system environment variable SKIP_LONG_TESTS is set, tests decorated
with @long_test will not be run.
"""
SKIP_LONG_TESTS = os.getenv('SKIP_LONG_TESTS', '').lower().startswith('t')
long_test = unittest.skipIf(SKIP_LONG_TESTS, 'Long tests skipped.')
|
import os, unittest
"""This module long_test provides a decorator, @long_test, that you can use to
mark tests which take a lot of wall clock time.
If the system environment variable SKIP_LONG_TESTS is set, tests decorated
with @long_test will not be run.
"""
SKIP_LONG_TESTS = os.getenv('SKIP_LONG_TESTS', None) is not None
long_test = unittest.skipIf(SKIP_LONG_TESTS, 'Long tests skipped.')
Improve handling of SKIP_LONG_TESTS build variable.import os, unittest
"""This module long_test provides a decorator, @long_test, that you can use to
mark tests which take a lot of wall clock time.
If the system environment variable SKIP_LONG_TESTS is set, tests decorated
with @long_test will not be run.
"""
SKIP_LONG_TESTS = os.getenv('SKIP_LONG_TESTS', '').lower().startswith('t')
long_test = unittest.skipIf(SKIP_LONG_TESTS, 'Long tests skipped.')
|
<commit_before>import os, unittest
"""This module long_test provides a decorator, @long_test, that you can use to
mark tests which take a lot of wall clock time.
If the system environment variable SKIP_LONG_TESTS is set, tests decorated
with @long_test will not be run.
"""
SKIP_LONG_TESTS = os.getenv('SKIP_LONG_TESTS', None) is not None
long_test = unittest.skipIf(SKIP_LONG_TESTS, 'Long tests skipped.')
<commit_msg>Improve handling of SKIP_LONG_TESTS build variable.<commit_after>import os, unittest
"""This module long_test provides a decorator, @long_test, that you can use to
mark tests which take a lot of wall clock time.
If the system environment variable SKIP_LONG_TESTS is set, tests decorated
with @long_test will not be run.
"""
SKIP_LONG_TESTS = os.getenv('SKIP_LONG_TESTS', '').lower().startswith('t')
long_test = unittest.skipIf(SKIP_LONG_TESTS, 'Long tests skipped.')
|
3c077d82881e3dd51eb0b3906e43f9e038346cb6
|
tensorflow_federated/python/core/test/__init__.py
|
tensorflow_federated/python/core/test/__init__.py
|
# Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes/functions for testing usage of TFF.
This package contains functions and utilities that can make testing usage of TFF
easier or more convenient and parallels the `tf.test` package.
"""
from tensorflow_federated.python.core.impl.reference_executor import ReferenceExecutor
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
# Used by doc generation script.
_allowed_symbols = [
"ReferenceExecutor",
"assert_contains_secure_aggregation",
"assert_contains_unsecure_aggregation",
"assert_not_contains_secure_aggregation",
"assert_not_contains_unsecure_aggregation",
]
|
# Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes/functions for testing usage of TFF.
This package contains functions and utilities that can make testing usage of TFF
easier or more convenient and parallels the `tf.test` package.
"""
from tensorflow_federated.python.core.impl.reference_executor import ReferenceExecutor
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
|
Remove `_allowed_symbols`, this is no longer used by the document generation.
|
Remove `_allowed_symbols`, this is no longer used by the document generation.
PiperOrigin-RevId: 321657180
|
Python
|
apache-2.0
|
tensorflow/federated,tensorflow/federated,tensorflow/federated
|
# Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes/functions for testing usage of TFF.
This package contains functions and utilities that can make testing usage of TFF
easier or more convenient and parallels the `tf.test` package.
"""
from tensorflow_federated.python.core.impl.reference_executor import ReferenceExecutor
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
# Used by doc generation script.
_allowed_symbols = [
"ReferenceExecutor",
"assert_contains_secure_aggregation",
"assert_contains_unsecure_aggregation",
"assert_not_contains_secure_aggregation",
"assert_not_contains_unsecure_aggregation",
]
Remove `_allowed_symbols`, this is no longer used by the document generation.
PiperOrigin-RevId: 321657180
|
# Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes/functions for testing usage of TFF.
This package contains functions and utilities that can make testing usage of TFF
easier or more convenient and parallels the `tf.test` package.
"""
from tensorflow_federated.python.core.impl.reference_executor import ReferenceExecutor
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
|
<commit_before># Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes/functions for testing usage of TFF.
This package contains functions and utilities that can make testing usage of TFF
easier or more convenient and parallels the `tf.test` package.
"""
from tensorflow_federated.python.core.impl.reference_executor import ReferenceExecutor
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
# Used by doc generation script.
_allowed_symbols = [
"ReferenceExecutor",
"assert_contains_secure_aggregation",
"assert_contains_unsecure_aggregation",
"assert_not_contains_secure_aggregation",
"assert_not_contains_unsecure_aggregation",
]
<commit_msg>Remove `_allowed_symbols`, this is no longer used by the document generation.
PiperOrigin-RevId: 321657180<commit_after>
|
# Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes/functions for testing usage of TFF.
This package contains functions and utilities that can make testing usage of TFF
easier or more convenient and parallels the `tf.test` package.
"""
from tensorflow_federated.python.core.impl.reference_executor import ReferenceExecutor
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
|
# Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes/functions for testing usage of TFF.
This package contains functions and utilities that can make testing usage of TFF
easier or more convenient and parallels the `tf.test` package.
"""
from tensorflow_federated.python.core.impl.reference_executor import ReferenceExecutor
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
# Used by doc generation script.
_allowed_symbols = [
"ReferenceExecutor",
"assert_contains_secure_aggregation",
"assert_contains_unsecure_aggregation",
"assert_not_contains_secure_aggregation",
"assert_not_contains_unsecure_aggregation",
]
Remove `_allowed_symbols`, this is no longer used by the document generation.
PiperOrigin-RevId: 321657180# Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes/functions for testing usage of TFF.
This package contains functions and utilities that can make testing usage of TFF
easier or more convenient and parallels the `tf.test` package.
"""
from tensorflow_federated.python.core.impl.reference_executor import ReferenceExecutor
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
|
<commit_before># Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes/functions for testing usage of TFF.
This package contains functions and utilities that can make testing usage of TFF
easier or more convenient and parallels the `tf.test` package.
"""
from tensorflow_federated.python.core.impl.reference_executor import ReferenceExecutor
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
# Used by doc generation script.
_allowed_symbols = [
"ReferenceExecutor",
"assert_contains_secure_aggregation",
"assert_contains_unsecure_aggregation",
"assert_not_contains_secure_aggregation",
"assert_not_contains_unsecure_aggregation",
]
<commit_msg>Remove `_allowed_symbols`, this is no longer used by the document generation.
PiperOrigin-RevId: 321657180<commit_after># Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test classes/functions for testing usage of TFF.
This package contains functions and utilities that can make testing usage of TFF
easier or more convenient and parallels the `tf.test` package.
"""
from tensorflow_federated.python.core.impl.reference_executor import ReferenceExecutor
from tensorflow_federated.python.core.test.static_assert import assert_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_contains_unsecure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_secure_aggregation
from tensorflow_federated.python.core.test.static_assert import assert_not_contains_unsecure_aggregation
|
64086acee22cfc2dde2fec9da1ea1b7745ce3d85
|
tests/misc/test_base_model.py
|
tests/misc/test_base_model.py
|
# -*- coding: UTF-8 -*-
from tests.base import ApiDBTestCase
class BaseModelTestCase(ApiDBTestCase):
def test_repr(self):
self.generate_fixture_project_status()
self.generate_fixture_project()
self.assertEqual(str(self.project), "<Project %s>" % self.project.name)
def test_query(self):
pass
def test_get(self):
pass
def test_get_by(self):
pass
def test_get_all_by(self):
pass
def test_create(self):
pass
def test_get_id_map(self):
pass
def save(self):
pass
def delete(self):
pass
def update(self):
pass
|
# -*- coding: UTF-8 -*-
from tests.base import ApiDBTestCase
class BaseModelTestCase(ApiDBTestCase):
def test_query(self):
pass
def test_get(self):
pass
def test_get_by(self):
pass
def test_get_all_by(self):
pass
def test_create(self):
pass
def test_get_id_map(self):
pass
def save(self):
pass
def delete(self):
pass
def update(self):
pass
|
Remove useless test about model representation
|
Remove useless test about model representation
|
Python
|
agpl-3.0
|
cgwire/zou
|
# -*- coding: UTF-8 -*-
from tests.base import ApiDBTestCase
class BaseModelTestCase(ApiDBTestCase):
def test_repr(self):
self.generate_fixture_project_status()
self.generate_fixture_project()
self.assertEqual(str(self.project), "<Project %s>" % self.project.name)
def test_query(self):
pass
def test_get(self):
pass
def test_get_by(self):
pass
def test_get_all_by(self):
pass
def test_create(self):
pass
def test_get_id_map(self):
pass
def save(self):
pass
def delete(self):
pass
def update(self):
pass
Remove useless test about model representation
|
# -*- coding: UTF-8 -*-
from tests.base import ApiDBTestCase
class BaseModelTestCase(ApiDBTestCase):
def test_query(self):
pass
def test_get(self):
pass
def test_get_by(self):
pass
def test_get_all_by(self):
pass
def test_create(self):
pass
def test_get_id_map(self):
pass
def save(self):
pass
def delete(self):
pass
def update(self):
pass
|
<commit_before># -*- coding: UTF-8 -*-
from tests.base import ApiDBTestCase
class BaseModelTestCase(ApiDBTestCase):
def test_repr(self):
self.generate_fixture_project_status()
self.generate_fixture_project()
self.assertEqual(str(self.project), "<Project %s>" % self.project.name)
def test_query(self):
pass
def test_get(self):
pass
def test_get_by(self):
pass
def test_get_all_by(self):
pass
def test_create(self):
pass
def test_get_id_map(self):
pass
def save(self):
pass
def delete(self):
pass
def update(self):
pass
<commit_msg>Remove useless test about model representation<commit_after>
|
# -*- coding: UTF-8 -*-
from tests.base import ApiDBTestCase
class BaseModelTestCase(ApiDBTestCase):
def test_query(self):
pass
def test_get(self):
pass
def test_get_by(self):
pass
def test_get_all_by(self):
pass
def test_create(self):
pass
def test_get_id_map(self):
pass
def save(self):
pass
def delete(self):
pass
def update(self):
pass
|
# -*- coding: UTF-8 -*-
from tests.base import ApiDBTestCase
class BaseModelTestCase(ApiDBTestCase):
def test_repr(self):
self.generate_fixture_project_status()
self.generate_fixture_project()
self.assertEqual(str(self.project), "<Project %s>" % self.project.name)
def test_query(self):
pass
def test_get(self):
pass
def test_get_by(self):
pass
def test_get_all_by(self):
pass
def test_create(self):
pass
def test_get_id_map(self):
pass
def save(self):
pass
def delete(self):
pass
def update(self):
pass
Remove useless test about model representation# -*- coding: UTF-8 -*-
from tests.base import ApiDBTestCase
class BaseModelTestCase(ApiDBTestCase):
def test_query(self):
pass
def test_get(self):
pass
def test_get_by(self):
pass
def test_get_all_by(self):
pass
def test_create(self):
pass
def test_get_id_map(self):
pass
def save(self):
pass
def delete(self):
pass
def update(self):
pass
|
<commit_before># -*- coding: UTF-8 -*-
from tests.base import ApiDBTestCase
class BaseModelTestCase(ApiDBTestCase):
def test_repr(self):
self.generate_fixture_project_status()
self.generate_fixture_project()
self.assertEqual(str(self.project), "<Project %s>" % self.project.name)
def test_query(self):
pass
def test_get(self):
pass
def test_get_by(self):
pass
def test_get_all_by(self):
pass
def test_create(self):
pass
def test_get_id_map(self):
pass
def save(self):
pass
def delete(self):
pass
def update(self):
pass
<commit_msg>Remove useless test about model representation<commit_after># -*- coding: UTF-8 -*-
from tests.base import ApiDBTestCase
class BaseModelTestCase(ApiDBTestCase):
def test_query(self):
pass
def test_get(self):
pass
def test_get_by(self):
pass
def test_get_all_by(self):
pass
def test_create(self):
pass
def test_get_id_map(self):
pass
def save(self):
pass
def delete(self):
pass
def update(self):
pass
|
7000e89828ec82f8e5c26c39ac290cb329036e17
|
tests/test_core/test_cache.py
|
tests/test_core/test_cache.py
|
import os
import sys
import unittest
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
# import logging
# logging.basicConfig(level=logging.DEBUG)
import setup_malcolm_paths
from mock import MagicMock
# module imports
from malcolm.core.cache import Cache
class TestProcess(unittest.TestCase):
def test_addition(self):
c = Cache()
c.apply_changes([["thing"], {1: 2}])
self.assertEqual(c["thing"][1], 2)
def test_deletion(self):
c = Cache()
c["path"] = 2
c.apply_changes([["path"]])
self.assertEqual(list(c), [])
def test_change(self):
c = Cache()
c[1] = 3
c.apply_changes([["path"], 4])
self.assertEqual(c["path"], 4)
def test_non_string_path_errors(self):
c = Cache()
self.assertRaises(AssertionError, c.apply_changes, [[1], 3])
def test_walk_path(self):
c = Cache()
c[1] = {2: {3: "end"}}
walked = c.walk_path([1, 2, 3])
self.assertEqual(walked, "end")
if __name__ == "__main__":
unittest.main()
|
import os
import sys
import unittest
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
# import logging
# logging.basicConfig(level=logging.DEBUG)
import setup_malcolm_paths
from mock import MagicMock
# module imports
from malcolm.core.cache import Cache
class TestProcess(unittest.TestCase):
def test_addition(self):
c = Cache()
c.apply_changes([["thing"], {1: 2}])
self.assertEqual(c["thing"][1], 2)
def test_deletion(self):
c = Cache()
c["path"] = 2
c.apply_changes([["path"]])
self.assertEqual(list(c), [])
def test_change(self):
c = Cache()
c[1] = 3
c.apply_changes([["path"], 4])
self.assertEqual(c["path"], 4)
def test_cache_update(self):
c = Cache()
c["path"] = 2
c.apply_changes([[], {123:"test"}])
self.assertEqual("test", c[123])
with self.assertRaises(KeyError):
c["path"]
def test_non_string_path_errors(self):
c = Cache()
self.assertRaises(AssertionError, c.apply_changes, [[1], 3])
def test_walk_path(self):
c = Cache()
c[1] = {2: {3: "end"}}
walked = c.walk_path([1, 2, 3])
self.assertEqual(walked, "end")
if __name__ == "__main__":
unittest.main(verbosity=2)
|
Test Cache root level dictionary update via apply_changes
|
Test Cache root level dictionary update via apply_changes
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
import os
import sys
import unittest
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
# import logging
# logging.basicConfig(level=logging.DEBUG)
import setup_malcolm_paths
from mock import MagicMock
# module imports
from malcolm.core.cache import Cache
class TestProcess(unittest.TestCase):
def test_addition(self):
c = Cache()
c.apply_changes([["thing"], {1: 2}])
self.assertEqual(c["thing"][1], 2)
def test_deletion(self):
c = Cache()
c["path"] = 2
c.apply_changes([["path"]])
self.assertEqual(list(c), [])
def test_change(self):
c = Cache()
c[1] = 3
c.apply_changes([["path"], 4])
self.assertEqual(c["path"], 4)
def test_non_string_path_errors(self):
c = Cache()
self.assertRaises(AssertionError, c.apply_changes, [[1], 3])
def test_walk_path(self):
c = Cache()
c[1] = {2: {3: "end"}}
walked = c.walk_path([1, 2, 3])
self.assertEqual(walked, "end")
if __name__ == "__main__":
unittest.main()
Test Cache root level dictionary update via apply_changes
|
import os
import sys
import unittest
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
# import logging
# logging.basicConfig(level=logging.DEBUG)
import setup_malcolm_paths
from mock import MagicMock
# module imports
from malcolm.core.cache import Cache
class TestProcess(unittest.TestCase):
def test_addition(self):
c = Cache()
c.apply_changes([["thing"], {1: 2}])
self.assertEqual(c["thing"][1], 2)
def test_deletion(self):
c = Cache()
c["path"] = 2
c.apply_changes([["path"]])
self.assertEqual(list(c), [])
def test_change(self):
c = Cache()
c[1] = 3
c.apply_changes([["path"], 4])
self.assertEqual(c["path"], 4)
def test_cache_update(self):
c = Cache()
c["path"] = 2
c.apply_changes([[], {123:"test"}])
self.assertEqual("test", c[123])
with self.assertRaises(KeyError):
c["path"]
def test_non_string_path_errors(self):
c = Cache()
self.assertRaises(AssertionError, c.apply_changes, [[1], 3])
def test_walk_path(self):
c = Cache()
c[1] = {2: {3: "end"}}
walked = c.walk_path([1, 2, 3])
self.assertEqual(walked, "end")
if __name__ == "__main__":
unittest.main(verbosity=2)
|
<commit_before>import os
import sys
import unittest
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
# import logging
# logging.basicConfig(level=logging.DEBUG)
import setup_malcolm_paths
from mock import MagicMock
# module imports
from malcolm.core.cache import Cache
class TestProcess(unittest.TestCase):
def test_addition(self):
c = Cache()
c.apply_changes([["thing"], {1: 2}])
self.assertEqual(c["thing"][1], 2)
def test_deletion(self):
c = Cache()
c["path"] = 2
c.apply_changes([["path"]])
self.assertEqual(list(c), [])
def test_change(self):
c = Cache()
c[1] = 3
c.apply_changes([["path"], 4])
self.assertEqual(c["path"], 4)
def test_non_string_path_errors(self):
c = Cache()
self.assertRaises(AssertionError, c.apply_changes, [[1], 3])
def test_walk_path(self):
c = Cache()
c[1] = {2: {3: "end"}}
walked = c.walk_path([1, 2, 3])
self.assertEqual(walked, "end")
if __name__ == "__main__":
unittest.main()
<commit_msg>Test Cache root level dictionary update via apply_changes<commit_after>
|
import os
import sys
import unittest
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
# import logging
# logging.basicConfig(level=logging.DEBUG)
import setup_malcolm_paths
from mock import MagicMock
# module imports
from malcolm.core.cache import Cache
class TestProcess(unittest.TestCase):
def test_addition(self):
c = Cache()
c.apply_changes([["thing"], {1: 2}])
self.assertEqual(c["thing"][1], 2)
def test_deletion(self):
c = Cache()
c["path"] = 2
c.apply_changes([["path"]])
self.assertEqual(list(c), [])
def test_change(self):
c = Cache()
c[1] = 3
c.apply_changes([["path"], 4])
self.assertEqual(c["path"], 4)
def test_cache_update(self):
c = Cache()
c["path"] = 2
c.apply_changes([[], {123:"test"}])
self.assertEqual("test", c[123])
with self.assertRaises(KeyError):
c["path"]
def test_non_string_path_errors(self):
c = Cache()
self.assertRaises(AssertionError, c.apply_changes, [[1], 3])
def test_walk_path(self):
c = Cache()
c[1] = {2: {3: "end"}}
walked = c.walk_path([1, 2, 3])
self.assertEqual(walked, "end")
if __name__ == "__main__":
unittest.main(verbosity=2)
|
import os
import sys
import unittest
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
# import logging
# logging.basicConfig(level=logging.DEBUG)
import setup_malcolm_paths
from mock import MagicMock
# module imports
from malcolm.core.cache import Cache
class TestProcess(unittest.TestCase):
def test_addition(self):
c = Cache()
c.apply_changes([["thing"], {1: 2}])
self.assertEqual(c["thing"][1], 2)
def test_deletion(self):
c = Cache()
c["path"] = 2
c.apply_changes([["path"]])
self.assertEqual(list(c), [])
def test_change(self):
c = Cache()
c[1] = 3
c.apply_changes([["path"], 4])
self.assertEqual(c["path"], 4)
def test_non_string_path_errors(self):
c = Cache()
self.assertRaises(AssertionError, c.apply_changes, [[1], 3])
def test_walk_path(self):
c = Cache()
c[1] = {2: {3: "end"}}
walked = c.walk_path([1, 2, 3])
self.assertEqual(walked, "end")
if __name__ == "__main__":
unittest.main()
Test Cache root level dictionary update via apply_changesimport os
import sys
import unittest
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
# import logging
# logging.basicConfig(level=logging.DEBUG)
import setup_malcolm_paths
from mock import MagicMock
# module imports
from malcolm.core.cache import Cache
class TestProcess(unittest.TestCase):
def test_addition(self):
c = Cache()
c.apply_changes([["thing"], {1: 2}])
self.assertEqual(c["thing"][1], 2)
def test_deletion(self):
c = Cache()
c["path"] = 2
c.apply_changes([["path"]])
self.assertEqual(list(c), [])
def test_change(self):
c = Cache()
c[1] = 3
c.apply_changes([["path"], 4])
self.assertEqual(c["path"], 4)
def test_cache_update(self):
c = Cache()
c["path"] = 2
c.apply_changes([[], {123:"test"}])
self.assertEqual("test", c[123])
with self.assertRaises(KeyError):
c["path"]
def test_non_string_path_errors(self):
c = Cache()
self.assertRaises(AssertionError, c.apply_changes, [[1], 3])
def test_walk_path(self):
c = Cache()
c[1] = {2: {3: "end"}}
walked = c.walk_path([1, 2, 3])
self.assertEqual(walked, "end")
if __name__ == "__main__":
unittest.main(verbosity=2)
|
<commit_before>import os
import sys
import unittest
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
# import logging
# logging.basicConfig(level=logging.DEBUG)
import setup_malcolm_paths
from mock import MagicMock
# module imports
from malcolm.core.cache import Cache
class TestProcess(unittest.TestCase):
def test_addition(self):
c = Cache()
c.apply_changes([["thing"], {1: 2}])
self.assertEqual(c["thing"][1], 2)
def test_deletion(self):
c = Cache()
c["path"] = 2
c.apply_changes([["path"]])
self.assertEqual(list(c), [])
def test_change(self):
c = Cache()
c[1] = 3
c.apply_changes([["path"], 4])
self.assertEqual(c["path"], 4)
def test_non_string_path_errors(self):
c = Cache()
self.assertRaises(AssertionError, c.apply_changes, [[1], 3])
def test_walk_path(self):
c = Cache()
c[1] = {2: {3: "end"}}
walked = c.walk_path([1, 2, 3])
self.assertEqual(walked, "end")
if __name__ == "__main__":
unittest.main()
<commit_msg>Test Cache root level dictionary update via apply_changes<commit_after>import os
import sys
import unittest
from collections import OrderedDict
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
# import logging
# logging.basicConfig(level=logging.DEBUG)
import setup_malcolm_paths
from mock import MagicMock
# module imports
from malcolm.core.cache import Cache
class TestProcess(unittest.TestCase):
def test_addition(self):
c = Cache()
c.apply_changes([["thing"], {1: 2}])
self.assertEqual(c["thing"][1], 2)
def test_deletion(self):
c = Cache()
c["path"] = 2
c.apply_changes([["path"]])
self.assertEqual(list(c), [])
def test_change(self):
c = Cache()
c[1] = 3
c.apply_changes([["path"], 4])
self.assertEqual(c["path"], 4)
def test_cache_update(self):
c = Cache()
c["path"] = 2
c.apply_changes([[], {123:"test"}])
self.assertEqual("test", c[123])
with self.assertRaises(KeyError):
c["path"]
def test_non_string_path_errors(self):
c = Cache()
self.assertRaises(AssertionError, c.apply_changes, [[1], 3])
def test_walk_path(self):
c = Cache()
c[1] = {2: {3: "end"}}
walked = c.walk_path([1, 2, 3])
self.assertEqual(walked, "end")
if __name__ == "__main__":
unittest.main(verbosity=2)
|
7310c2ce4b8ccd69374a85877c2df97a2b6ade70
|
nap/dataviews/views.py
|
nap/dataviews/views.py
|
from collections import defaultdict
from inspect import classify_class_attrs
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _field_names(self):
return tuple(
name
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
)
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _update(self, data):
'''
Update an instance from supplied data.
'''
errors = defaultdict(list)
for name in self._field_names:
if name in data:
try:
setattr(self, name, data[name])
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
|
from collections import defaultdict
from inspect import classify_class_attrs
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _fields(self):
return {
name: prop
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
}
@cached_property
def _field_names(self):
return tuple(self._fields.keys())
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _apply(self, data, update=False):
'''
Update an instance from supplied data.
If update is False, all fields not tagged as ._required=False MUST be
supplied in the data dict.
'''
errors = defaultdict(list)
for name in self._field_names:
try:
setattr(self, name, data[name])
except KeyError:
if self.update:
pass
if getattr(self._fields[name], '_required', True):
errors[name].append(
ValidationError('This field is required')
)
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
|
Add _fields cache Change _update to _apply and add option for non-required fields
|
Add _fields cache
Change _update to _apply and add option for non-required fields
|
Python
|
bsd-3-clause
|
limbera/django-nap,MarkusH/django-nap
|
from collections import defaultdict
from inspect import classify_class_attrs
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _field_names(self):
return tuple(
name
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
)
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _update(self, data):
'''
Update an instance from supplied data.
'''
errors = defaultdict(list)
for name in self._field_names:
if name in data:
try:
setattr(self, name, data[name])
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
Add _fields cache
Change _update to _apply and add option for non-required fields
|
from collections import defaultdict
from inspect import classify_class_attrs
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _fields(self):
return {
name: prop
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
}
@cached_property
def _field_names(self):
return tuple(self._fields.keys())
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _apply(self, data, update=False):
'''
Update an instance from supplied data.
If update is False, all fields not tagged as ._required=False MUST be
supplied in the data dict.
'''
errors = defaultdict(list)
for name in self._field_names:
try:
setattr(self, name, data[name])
except KeyError:
if self.update:
pass
if getattr(self._fields[name], '_required', True):
errors[name].append(
ValidationError('This field is required')
)
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
|
<commit_before>
from collections import defaultdict
from inspect import classify_class_attrs
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _field_names(self):
return tuple(
name
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
)
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _update(self, data):
'''
Update an instance from supplied data.
'''
errors = defaultdict(list)
for name in self._field_names:
if name in data:
try:
setattr(self, name, data[name])
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
<commit_msg>Add _fields cache
Change _update to _apply and add option for non-required fields<commit_after>
|
from collections import defaultdict
from inspect import classify_class_attrs
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _fields(self):
return {
name: prop
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
}
@cached_property
def _field_names(self):
return tuple(self._fields.keys())
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _apply(self, data, update=False):
'''
Update an instance from supplied data.
If update is False, all fields not tagged as ._required=False MUST be
supplied in the data dict.
'''
errors = defaultdict(list)
for name in self._field_names:
try:
setattr(self, name, data[name])
except KeyError:
if self.update:
pass
if getattr(self._fields[name], '_required', True):
errors[name].append(
ValidationError('This field is required')
)
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
|
from collections import defaultdict
from inspect import classify_class_attrs
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _field_names(self):
return tuple(
name
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
)
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _update(self, data):
'''
Update an instance from supplied data.
'''
errors = defaultdict(list)
for name in self._field_names:
if name in data:
try:
setattr(self, name, data[name])
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
Add _fields cache
Change _update to _apply and add option for non-required fields
from collections import defaultdict
from inspect import classify_class_attrs
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _fields(self):
return {
name: prop
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
}
@cached_property
def _field_names(self):
return tuple(self._fields.keys())
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _apply(self, data, update=False):
'''
Update an instance from supplied data.
If update is False, all fields not tagged as ._required=False MUST be
supplied in the data dict.
'''
errors = defaultdict(list)
for name in self._field_names:
try:
setattr(self, name, data[name])
except KeyError:
if self.update:
pass
if getattr(self._fields[name], '_required', True):
errors[name].append(
ValidationError('This field is required')
)
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
|
<commit_before>
from collections import defaultdict
from inspect import classify_class_attrs
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _field_names(self):
return tuple(
name
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
)
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _update(self, data):
'''
Update an instance from supplied data.
'''
errors = defaultdict(list)
for name in self._field_names:
if name in data:
try:
setattr(self, name, data[name])
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
<commit_msg>Add _fields cache
Change _update to _apply and add option for non-required fields<commit_after>
from collections import defaultdict
from inspect import classify_class_attrs
from django.forms import ValidationError
from django.utils.functional import cached_property
from .fields import field
from .utils import DictObject
class DataView(object):
def __init__(self, obj=None, **kwargs):
if obj is None:
obj = DictObject()
self._obj = obj
self._kwargs = kwargs
@cached_property
def _fields(self):
return {
name: prop
for name, kind, cls, prop in classify_class_attrs(self.__class__)
if isinstance(prop, field)
}
@cached_property
def _field_names(self):
return tuple(self._fields.keys())
def _reduce(self):
'''
Reduce our instance to its serialisable state.
Returns a dict.
'''
return {
name: getattr(self, name)
for name in self._field_names
}
def _apply(self, data, update=False):
'''
Update an instance from supplied data.
If update is False, all fields not tagged as ._required=False MUST be
supplied in the data dict.
'''
errors = defaultdict(list)
for name in self._field_names:
try:
setattr(self, name, data[name])
except KeyError:
if self.update:
pass
if getattr(self._fields[name], '_required', True):
errors[name].append(
ValidationError('This field is required')
)
except ValidationError as e:
errors[name].append(e.message)
self._errors = dict(errors)
if errors:
raise ValidationError(self._errors)
return self._obj
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.