commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bfd1ef748e9d29cf4abccac03098d1d369e6be12
|
spyder_memory_profiler/__init__.py
|
spyder_memory_profiler/__init__.py
|
# -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.1.dev0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
|
# -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.1'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
|
Change version number to 0.2.1
|
Change version number to 0.2.1
|
Python
|
mit
|
spyder-ide/spyder.memory_profiler
|
# -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.1.dev0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
Change version number to 0.2.1
|
# -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.1'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.1.dev0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
<commit_msg>Change version number to 0.2.1<commit_after>
|
# -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.1'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
|
# -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.1.dev0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
Change version number to 0.2.1# -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.1'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.1.dev0'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
<commit_msg>Change version number to 0.2.1<commit_after># -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
__version__ = '0.2.1'
# =============================================================================
# The following statements are required to register this 3rd party plugin:
# =============================================================================
from .memoryprofiler import MemoryProfiler
PLUGIN_CLASS = MemoryProfiler
|
ac5ceee751b0c374ffcf1bd0e52ce085e8d7295c
|
nyucal/cli.py
|
nyucal/cli.py
|
# -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
click.echo("cli for nyucal")
@main.command()
def list(source=None):
"""List the available calendars in the calendar source"""
if source is None:
source = nyucal.SOURCE_URL # noqa
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
pass
@main.command()
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
def list(source):
"""List the available calendars in the calendar source
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
@main.command()
@click.argument('name', nargs=1)
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
@click.option('--format', '-f',
type=click.Choice(['gcalcsv']),
default='gcalcsv',
help='Write in this format')
@click.option('--output', '-o', type=click.File('w'), default='-',
help='Write to this file (default: stdout)')
def get(source, name, format, output):
"""Get the calendar named NAME and output in the specified format
If NAME contains a space, it will need to be quoted.
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
calendar = store.calendar(name)
writers = {'gcalcsv': nyucal.GcalCsvWriter}
writer = writers[format.lower()](output)
writer.write(calendar)
if __name__ == "__main__":
main()
|
Add the `get` command to the CLI.
|
Add the `get` command to the CLI.
|
Python
|
mit
|
nyumathclinic/nyucal,nyumathclinic/nyucal
|
# -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
click.echo("cli for nyucal")
@main.command()
def list(source=None):
"""List the available calendars in the calendar source"""
if source is None:
source = nyucal.SOURCE_URL # noqa
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
if __name__ == "__main__":
main()
Add the `get` command to the CLI.
|
# -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
pass
@main.command()
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
def list(source):
"""List the available calendars in the calendar source
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
@main.command()
@click.argument('name', nargs=1)
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
@click.option('--format', '-f',
type=click.Choice(['gcalcsv']),
default='gcalcsv',
help='Write in this format')
@click.option('--output', '-o', type=click.File('w'), default='-',
help='Write to this file (default: stdout)')
def get(source, name, format, output):
"""Get the calendar named NAME and output in the specified format
If NAME contains a space, it will need to be quoted.
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
calendar = store.calendar(name)
writers = {'gcalcsv': nyucal.GcalCsvWriter}
writer = writers[format.lower()](output)
writer.write(calendar)
if __name__ == "__main__":
main()
|
<commit_before># -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
click.echo("cli for nyucal")
@main.command()
def list(source=None):
"""List the available calendars in the calendar source"""
if source is None:
source = nyucal.SOURCE_URL # noqa
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
if __name__ == "__main__":
main()
<commit_msg>Add the `get` command to the CLI.<commit_after>
|
# -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
pass
@main.command()
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
def list(source):
"""List the available calendars in the calendar source
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
@main.command()
@click.argument('name', nargs=1)
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
@click.option('--format', '-f',
type=click.Choice(['gcalcsv']),
default='gcalcsv',
help='Write in this format')
@click.option('--output', '-o', type=click.File('w'), default='-',
help='Write to this file (default: stdout)')
def get(source, name, format, output):
"""Get the calendar named NAME and output in the specified format
If NAME contains a space, it will need to be quoted.
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
calendar = store.calendar(name)
writers = {'gcalcsv': nyucal.GcalCsvWriter}
writer = writers[format.lower()](output)
writer.write(calendar)
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
click.echo("cli for nyucal")
@main.command()
def list(source=None):
"""List the available calendars in the calendar source"""
if source is None:
source = nyucal.SOURCE_URL # noqa
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
if __name__ == "__main__":
main()
Add the `get` command to the CLI.# -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
pass
@main.command()
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
def list(source):
"""List the available calendars in the calendar source
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
@main.command()
@click.argument('name', nargs=1)
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
@click.option('--format', '-f',
type=click.Choice(['gcalcsv']),
default='gcalcsv',
help='Write in this format')
@click.option('--output', '-o', type=click.File('w'), default='-',
help='Write to this file (default: stdout)')
def get(source, name, format, output):
"""Get the calendar named NAME and output in the specified format
If NAME contains a space, it will need to be quoted.
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
calendar = store.calendar(name)
writers = {'gcalcsv': nyucal.GcalCsvWriter}
writer = writers[format.lower()](output)
writer.write(calendar)
if __name__ == "__main__":
main()
|
<commit_before># -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
click.echo("cli for nyucal")
@main.command()
def list(source=None):
"""List the available calendars in the calendar source"""
if source is None:
source = nyucal.SOURCE_URL # noqa
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
if __name__ == "__main__":
main()
<commit_msg>Add the `get` command to the CLI.<commit_after># -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
pass
@main.command()
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
def list(source):
"""List the available calendars in the calendar source
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
@main.command()
@click.argument('name', nargs=1)
@click.option('--source', '-s', default=nyucal.SOURCE_URL,
help="""Calendars source (URL, file path, or string).
(default: {} """.format(nyucal.SOURCE_URL))
@click.option('--format', '-f',
type=click.Choice(['gcalcsv']),
default='gcalcsv',
help='Write in this format')
@click.option('--output', '-o', type=click.File('w'), default='-',
help='Write to this file (default: stdout)')
def get(source, name, format, output):
"""Get the calendar named NAME and output in the specified format
If NAME contains a space, it will need to be quoted.
Since the calendar store is, by default, scraped from a web page,
this command will fail if no source is specified and the computer
is not online.
"""
store = nyucal.CalendarStore(source)
calendar = store.calendar(name)
writers = {'gcalcsv': nyucal.GcalCsvWriter}
writer = writers[format.lower()](output)
writer.write(calendar)
if __name__ == "__main__":
main()
|
019bca440c46039954a6228bbd22f79a5449aecd
|
custom/fri/management/commands/dump_fri_message_bank.py
|
custom/fri/management/commands/dump_fri_message_bank.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from couchexport.export import export_raw
from custom.fri.models import FRIMessageBankMessage, FRIRandomizedMessage, FRIExtraMessage
from django.core.management.base import BaseCommand
from io import open
class Command(BaseCommand):
def write_result_to_file(self, model_name, result, fields):
with open('%s.xlsx' % model_name, 'wb') as f:
headers = fields
excel_data = []
for obj in result:
excel_data.append((getattr(obj, field) for field in fields))
export_raw(
((model_name, headers), ),
((model_name, excel_data), ),
f
)
def handle(self, **options):
self.write_result_to_file(
'FRIMessageBankMessage',
FRIMessageBankMessage.view('fri/message_bank', include_docs=True).all(),
('_id', 'domain', 'risk_profile', 'message', 'fri_id')
)
self.write_result_to_file(
'FRIRandomizedMessage',
FRIRandomizedMessage.view('fri/randomized_message', include_docs=True).all(),
('_id', 'domain', 'case_id', 'message_bank_message_id', 'order')
)
self.write_result_to_file(
'FRIExtraMessage',
FRIExtraMessage.view('fri/extra_message', include_docs=True).all(),
('_id', 'domain', 'message_id', 'message')
)
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from custom.fri.models import FRIMessageBankMessage, FRIRandomizedMessage, FRIExtraMessage
from django.core.management.base import BaseCommand
from six import moves
class Command(BaseCommand):
def delete_docs(self, model_name, result):
print("\nHandling %s" % model_name)
result = list(result)
answer = moves.input("Delete %s docs? y/n" % len(result))
if answer == 'y':
count = 0
for doc in result:
if doc.doc_type != model_name:
print("Deleted %s docs" % count)
raise ValueError("Expected %s, got %s" % (model_name, doc.doc_type))
doc.delete()
count += 1
print("Deleted %s docs" % count)
def handle(self, **options):
self.delete_docs(
'FRIMessageBankMessage',
FRIMessageBankMessage.view('fri/message_bank', include_docs=True).all()
)
self.delete_docs(
'FRIRandomizedMessage',
FRIRandomizedMessage.view('fri/randomized_message', include_docs=True).all()
)
self.delete_docs(
'FRIExtraMessage',
FRIExtraMessage.view('fri/extra_message', include_docs=True).all()
)
|
Update fri dump script to delete docs
|
Update fri dump script to delete docs
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import
from __future__ import unicode_literals
from couchexport.export import export_raw
from custom.fri.models import FRIMessageBankMessage, FRIRandomizedMessage, FRIExtraMessage
from django.core.management.base import BaseCommand
from io import open
class Command(BaseCommand):
def write_result_to_file(self, model_name, result, fields):
with open('%s.xlsx' % model_name, 'wb') as f:
headers = fields
excel_data = []
for obj in result:
excel_data.append((getattr(obj, field) for field in fields))
export_raw(
((model_name, headers), ),
((model_name, excel_data), ),
f
)
def handle(self, **options):
self.write_result_to_file(
'FRIMessageBankMessage',
FRIMessageBankMessage.view('fri/message_bank', include_docs=True).all(),
('_id', 'domain', 'risk_profile', 'message', 'fri_id')
)
self.write_result_to_file(
'FRIRandomizedMessage',
FRIRandomizedMessage.view('fri/randomized_message', include_docs=True).all(),
('_id', 'domain', 'case_id', 'message_bank_message_id', 'order')
)
self.write_result_to_file(
'FRIExtraMessage',
FRIExtraMessage.view('fri/extra_message', include_docs=True).all(),
('_id', 'domain', 'message_id', 'message')
)
Update fri dump script to delete docs
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from custom.fri.models import FRIMessageBankMessage, FRIRandomizedMessage, FRIExtraMessage
from django.core.management.base import BaseCommand
from six import moves
class Command(BaseCommand):
def delete_docs(self, model_name, result):
print("\nHandling %s" % model_name)
result = list(result)
answer = moves.input("Delete %s docs? y/n" % len(result))
if answer == 'y':
count = 0
for doc in result:
if doc.doc_type != model_name:
print("Deleted %s docs" % count)
raise ValueError("Expected %s, got %s" % (model_name, doc.doc_type))
doc.delete()
count += 1
print("Deleted %s docs" % count)
def handle(self, **options):
self.delete_docs(
'FRIMessageBankMessage',
FRIMessageBankMessage.view('fri/message_bank', include_docs=True).all()
)
self.delete_docs(
'FRIRandomizedMessage',
FRIRandomizedMessage.view('fri/randomized_message', include_docs=True).all()
)
self.delete_docs(
'FRIExtraMessage',
FRIExtraMessage.view('fri/extra_message', include_docs=True).all()
)
|
<commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from couchexport.export import export_raw
from custom.fri.models import FRIMessageBankMessage, FRIRandomizedMessage, FRIExtraMessage
from django.core.management.base import BaseCommand
from io import open
class Command(BaseCommand):
def write_result_to_file(self, model_name, result, fields):
with open('%s.xlsx' % model_name, 'wb') as f:
headers = fields
excel_data = []
for obj in result:
excel_data.append((getattr(obj, field) for field in fields))
export_raw(
((model_name, headers), ),
((model_name, excel_data), ),
f
)
def handle(self, **options):
self.write_result_to_file(
'FRIMessageBankMessage',
FRIMessageBankMessage.view('fri/message_bank', include_docs=True).all(),
('_id', 'domain', 'risk_profile', 'message', 'fri_id')
)
self.write_result_to_file(
'FRIRandomizedMessage',
FRIRandomizedMessage.view('fri/randomized_message', include_docs=True).all(),
('_id', 'domain', 'case_id', 'message_bank_message_id', 'order')
)
self.write_result_to_file(
'FRIExtraMessage',
FRIExtraMessage.view('fri/extra_message', include_docs=True).all(),
('_id', 'domain', 'message_id', 'message')
)
<commit_msg>Update fri dump script to delete docs<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from custom.fri.models import FRIMessageBankMessage, FRIRandomizedMessage, FRIExtraMessage
from django.core.management.base import BaseCommand
from six import moves
class Command(BaseCommand):
def delete_docs(self, model_name, result):
print("\nHandling %s" % model_name)
result = list(result)
answer = moves.input("Delete %s docs? y/n" % len(result))
if answer == 'y':
count = 0
for doc in result:
if doc.doc_type != model_name:
print("Deleted %s docs" % count)
raise ValueError("Expected %s, got %s" % (model_name, doc.doc_type))
doc.delete()
count += 1
print("Deleted %s docs" % count)
def handle(self, **options):
self.delete_docs(
'FRIMessageBankMessage',
FRIMessageBankMessage.view('fri/message_bank', include_docs=True).all()
)
self.delete_docs(
'FRIRandomizedMessage',
FRIRandomizedMessage.view('fri/randomized_message', include_docs=True).all()
)
self.delete_docs(
'FRIExtraMessage',
FRIExtraMessage.view('fri/extra_message', include_docs=True).all()
)
|
from __future__ import absolute_import
from __future__ import unicode_literals
from couchexport.export import export_raw
from custom.fri.models import FRIMessageBankMessage, FRIRandomizedMessage, FRIExtraMessage
from django.core.management.base import BaseCommand
from io import open
class Command(BaseCommand):
def write_result_to_file(self, model_name, result, fields):
with open('%s.xlsx' % model_name, 'wb') as f:
headers = fields
excel_data = []
for obj in result:
excel_data.append((getattr(obj, field) for field in fields))
export_raw(
((model_name, headers), ),
((model_name, excel_data), ),
f
)
def handle(self, **options):
self.write_result_to_file(
'FRIMessageBankMessage',
FRIMessageBankMessage.view('fri/message_bank', include_docs=True).all(),
('_id', 'domain', 'risk_profile', 'message', 'fri_id')
)
self.write_result_to_file(
'FRIRandomizedMessage',
FRIRandomizedMessage.view('fri/randomized_message', include_docs=True).all(),
('_id', 'domain', 'case_id', 'message_bank_message_id', 'order')
)
self.write_result_to_file(
'FRIExtraMessage',
FRIExtraMessage.view('fri/extra_message', include_docs=True).all(),
('_id', 'domain', 'message_id', 'message')
)
Update fri dump script to delete docsfrom __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from custom.fri.models import FRIMessageBankMessage, FRIRandomizedMessage, FRIExtraMessage
from django.core.management.base import BaseCommand
from six import moves
class Command(BaseCommand):
def delete_docs(self, model_name, result):
print("\nHandling %s" % model_name)
result = list(result)
answer = moves.input("Delete %s docs? y/n" % len(result))
if answer == 'y':
count = 0
for doc in result:
if doc.doc_type != model_name:
print("Deleted %s docs" % count)
raise ValueError("Expected %s, got %s" % (model_name, doc.doc_type))
doc.delete()
count += 1
print("Deleted %s docs" % count)
def handle(self, **options):
self.delete_docs(
'FRIMessageBankMessage',
FRIMessageBankMessage.view('fri/message_bank', include_docs=True).all()
)
self.delete_docs(
'FRIRandomizedMessage',
FRIRandomizedMessage.view('fri/randomized_message', include_docs=True).all()
)
self.delete_docs(
'FRIExtraMessage',
FRIExtraMessage.view('fri/extra_message', include_docs=True).all()
)
|
<commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from couchexport.export import export_raw
from custom.fri.models import FRIMessageBankMessage, FRIRandomizedMessage, FRIExtraMessage
from django.core.management.base import BaseCommand
from io import open
class Command(BaseCommand):
def write_result_to_file(self, model_name, result, fields):
with open('%s.xlsx' % model_name, 'wb') as f:
headers = fields
excel_data = []
for obj in result:
excel_data.append((getattr(obj, field) for field in fields))
export_raw(
((model_name, headers), ),
((model_name, excel_data), ),
f
)
def handle(self, **options):
self.write_result_to_file(
'FRIMessageBankMessage',
FRIMessageBankMessage.view('fri/message_bank', include_docs=True).all(),
('_id', 'domain', 'risk_profile', 'message', 'fri_id')
)
self.write_result_to_file(
'FRIRandomizedMessage',
FRIRandomizedMessage.view('fri/randomized_message', include_docs=True).all(),
('_id', 'domain', 'case_id', 'message_bank_message_id', 'order')
)
self.write_result_to_file(
'FRIExtraMessage',
FRIExtraMessage.view('fri/extra_message', include_docs=True).all(),
('_id', 'domain', 'message_id', 'message')
)
<commit_msg>Update fri dump script to delete docs<commit_after>from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from custom.fri.models import FRIMessageBankMessage, FRIRandomizedMessage, FRIExtraMessage
from django.core.management.base import BaseCommand
from six import moves
class Command(BaseCommand):
def delete_docs(self, model_name, result):
print("\nHandling %s" % model_name)
result = list(result)
answer = moves.input("Delete %s docs? y/n" % len(result))
if answer == 'y':
count = 0
for doc in result:
if doc.doc_type != model_name:
print("Deleted %s docs" % count)
raise ValueError("Expected %s, got %s" % (model_name, doc.doc_type))
doc.delete()
count += 1
print("Deleted %s docs" % count)
def handle(self, **options):
self.delete_docs(
'FRIMessageBankMessage',
FRIMessageBankMessage.view('fri/message_bank', include_docs=True).all()
)
self.delete_docs(
'FRIRandomizedMessage',
FRIRandomizedMessage.view('fri/randomized_message', include_docs=True).all()
)
self.delete_docs(
'FRIExtraMessage',
FRIExtraMessage.view('fri/extra_message', include_docs=True).all()
)
|
be82f1beac54f46fe9458c3ca26b8e3b786bc9f5
|
web/impact/impact/views/general_view_set.py
|
web/impact/impact/views/general_view_set.py
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework_tracking.mixins import LoggingMixin
from impact.models.utils import snake_to_camel_case
from impact.permissions import DynamicModelPermissions
from impact.serializers import GeneralSerializer
class GeneralViewSet(LoggingMixin, viewsets.ModelViewSet):
permission_classes = (
permissions.IsAuthenticated,
DynamicModelPermissions,
)
@property
def model(self):
return apps.get_model(
app_label=self.kwargs['app'],
model_name=snake_to_camel_case(self.kwargs['model']))
def get_queryset(self):
return self.model.objects.all()
def get_serializer_class(self):
GeneralSerializer.Meta.model = self.model
return GeneralSerializer
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework_tracking.mixins import LoggingMixin
from impact.models.utils import snake_to_camel_case
from impact.permissions import DynamicModelPermissions
from impact.serializers import GeneralSerializer
model_attribute_calls = [
'Startup_additional_industries',
'Startup_recommendation_tags',
'StartupLabel_startups',
'RefundCode_programs',
'UserLabel_users',
'Observer_newsletter_cc_roles',
'ExpertProfile_functional_expertise',
'ExpertProfile_additional_industries',
'ExpertProfile_mentoring_specialties',
'Newsletter_recipient_roles',
'Section_interest_categories',
]
class GeneralViewSet(LoggingMixin, viewsets.ModelViewSet):
permission_classes = (
permissions.IsAuthenticated,
DynamicModelPermissions,
)
@property
def model(self):
if self.kwargs['model'] in model_attribute_calls:
return apps.get_model(
app_label=self.kwargs['app'],
model_name=self.kwargs['model'])
else:
return apps.get_model(
app_label=self.kwargs['app'],
model_name=snake_to_camel_case(self.kwargs['model']))
def get_queryset(self):
return self.model.objects.all()
def get_serializer_class(self):
GeneralSerializer.Meta.model = self.model
return GeneralSerializer
|
Create List of Model Attribute API Calls That Require Snake Case
|
[AC-5010] Create List of Model Attribute API Calls That Require Snake Case
This commit creates a list of models that use attributes. Those calls have snake case in their model definition, and were breaking when converted back to camel case. This may not be the most efficient way to do this, but it is currently working.
|
Python
|
mit
|
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework_tracking.mixins import LoggingMixin
from impact.models.utils import snake_to_camel_case
from impact.permissions import DynamicModelPermissions
from impact.serializers import GeneralSerializer
class GeneralViewSet(LoggingMixin, viewsets.ModelViewSet):
    """Generic CRUD viewset whose model is resolved from the URL kwargs."""

    permission_classes = (
        permissions.IsAuthenticated,
        DynamicModelPermissions,
    )

    @property
    def model(self):
        """Return the model class named by the ``app``/``model`` URL kwargs.

        The ``model`` kwarg arrives in snake_case and is converted to
        CamelCase for the app-registry lookup.
        """
        return apps.get_model(
            app_label=self.kwargs['app'],
            model_name=snake_to_camel_case(self.kwargs['model']))

    def get_queryset(self):
        # All objects of the dynamically resolved model.
        return self.model.objects.all()

    def get_serializer_class(self):
        # GeneralSerializer is shared across models; point its Meta at the
        # model resolved for this request before handing it out.
        GeneralSerializer.Meta.model = self.model
        return GeneralSerializer
[AC-5010] Create List of Model Attribute API Calls That Require Snake Case
This commit creates a list of models that use attributes. Those calls have snake case in their model definition, and were breaking when converted back to camel case. This may not be the most efficient way to do this, but it is currently working.
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework_tracking.mixins import LoggingMixin
from impact.models.utils import snake_to_camel_case
from impact.permissions import DynamicModelPermissions
from impact.serializers import GeneralSerializer
model_attribute_calls = [
'Startup_additional_industries',
'Startup_recommendation_tags',
'StartupLabel_startups',
'RefundCode_programs',
'UserLabel_users',
'Observer_newsletter_cc_roles',
'ExpertProfile_functional_expertise',
'ExpertProfile_additional_industries',
'ExpertProfile_mentoring_specialties',
'Newsletter_recipient_roles',
'Section_interest_categories',
]
class GeneralViewSet(LoggingMixin, viewsets.ModelViewSet):
permission_classes = (
permissions.IsAuthenticated,
DynamicModelPermissions,
)
@property
def model(self):
if self.kwargs['model'] in model_attribute_calls:
return apps.get_model(
app_label=self.kwargs['app'],
model_name=self.kwargs['model'])
else:
return apps.get_model(
app_label=self.kwargs['app'],
model_name=snake_to_camel_case(self.kwargs['model']))
def get_queryset(self):
return self.model.objects.all()
def get_serializer_class(self):
GeneralSerializer.Meta.model = self.model
return GeneralSerializer
|
<commit_before># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework_tracking.mixins import LoggingMixin
from impact.models.utils import snake_to_camel_case
from impact.permissions import DynamicModelPermissions
from impact.serializers import GeneralSerializer
class GeneralViewSet(LoggingMixin, viewsets.ModelViewSet):
permission_classes = (
permissions.IsAuthenticated,
DynamicModelPermissions,
)
@property
def model(self):
return apps.get_model(
app_label=self.kwargs['app'],
model_name=snake_to_camel_case(self.kwargs['model']))
def get_queryset(self):
return self.model.objects.all()
def get_serializer_class(self):
GeneralSerializer.Meta.model = self.model
return GeneralSerializer
<commit_msg>[AC-5010] Create List of Model Attribute API Calls That Require Snake Case
This commit creates a list of models that use attributes. Those calls have snake case in their model definition, and were breaking when converted back to camel case. This may not be the most efficient way to do this, but it is currently working.<commit_after>
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework_tracking.mixins import LoggingMixin
from impact.models.utils import snake_to_camel_case
from impact.permissions import DynamicModelPermissions
from impact.serializers import GeneralSerializer
model_attribute_calls = [
'Startup_additional_industries',
'Startup_recommendation_tags',
'StartupLabel_startups',
'RefundCode_programs',
'UserLabel_users',
'Observer_newsletter_cc_roles',
'ExpertProfile_functional_expertise',
'ExpertProfile_additional_industries',
'ExpertProfile_mentoring_specialties',
'Newsletter_recipient_roles',
'Section_interest_categories',
]
class GeneralViewSet(LoggingMixin, viewsets.ModelViewSet):
permission_classes = (
permissions.IsAuthenticated,
DynamicModelPermissions,
)
@property
def model(self):
if self.kwargs['model'] in model_attribute_calls:
return apps.get_model(
app_label=self.kwargs['app'],
model_name=self.kwargs['model'])
else:
return apps.get_model(
app_label=self.kwargs['app'],
model_name=snake_to_camel_case(self.kwargs['model']))
def get_queryset(self):
return self.model.objects.all()
def get_serializer_class(self):
GeneralSerializer.Meta.model = self.model
return GeneralSerializer
|
# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework_tracking.mixins import LoggingMixin
from impact.models.utils import snake_to_camel_case
from impact.permissions import DynamicModelPermissions
from impact.serializers import GeneralSerializer
class GeneralViewSet(LoggingMixin, viewsets.ModelViewSet):
permission_classes = (
permissions.IsAuthenticated,
DynamicModelPermissions,
)
@property
def model(self):
return apps.get_model(
app_label=self.kwargs['app'],
model_name=snake_to_camel_case(self.kwargs['model']))
def get_queryset(self):
return self.model.objects.all()
def get_serializer_class(self):
GeneralSerializer.Meta.model = self.model
return GeneralSerializer
[AC-5010] Create List of Model Attribute API Calls That Require Snake Case
This commit creates a list of models that use attributes. Those calls have snake case in their model definition, and were breaking when converted back to camel case. This may not be the most efficient way to do this, but it is currently working.# MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework_tracking.mixins import LoggingMixin
from impact.models.utils import snake_to_camel_case
from impact.permissions import DynamicModelPermissions
from impact.serializers import GeneralSerializer
model_attribute_calls = [
'Startup_additional_industries',
'Startup_recommendation_tags',
'StartupLabel_startups',
'RefundCode_programs',
'UserLabel_users',
'Observer_newsletter_cc_roles',
'ExpertProfile_functional_expertise',
'ExpertProfile_additional_industries',
'ExpertProfile_mentoring_specialties',
'Newsletter_recipient_roles',
'Section_interest_categories',
]
class GeneralViewSet(LoggingMixin, viewsets.ModelViewSet):
permission_classes = (
permissions.IsAuthenticated,
DynamicModelPermissions,
)
@property
def model(self):
if self.kwargs['model'] in model_attribute_calls:
return apps.get_model(
app_label=self.kwargs['app'],
model_name=self.kwargs['model'])
else:
return apps.get_model(
app_label=self.kwargs['app'],
model_name=snake_to_camel_case(self.kwargs['model']))
def get_queryset(self):
return self.model.objects.all()
def get_serializer_class(self):
GeneralSerializer.Meta.model = self.model
return GeneralSerializer
|
<commit_before># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework_tracking.mixins import LoggingMixin
from impact.models.utils import snake_to_camel_case
from impact.permissions import DynamicModelPermissions
from impact.serializers import GeneralSerializer
class GeneralViewSet(LoggingMixin, viewsets.ModelViewSet):
permission_classes = (
permissions.IsAuthenticated,
DynamicModelPermissions,
)
@property
def model(self):
return apps.get_model(
app_label=self.kwargs['app'],
model_name=snake_to_camel_case(self.kwargs['model']))
def get_queryset(self):
return self.model.objects.all()
def get_serializer_class(self):
GeneralSerializer.Meta.model = self.model
return GeneralSerializer
<commit_msg>[AC-5010] Create List of Model Attribute API Calls That Require Snake Case
This commit creates a list of models that use attributes. Those calls have snake case in their model definition, and were breaking when converted back to camel case. This may not be the most efficient way to do this, but it is currently working.<commit_after># MIT License
# Copyright (c) 2017 MassChallenge, Inc.
from django.apps import apps
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework_tracking.mixins import LoggingMixin
from impact.models.utils import snake_to_camel_case
from impact.permissions import DynamicModelPermissions
from impact.serializers import GeneralSerializer
model_attribute_calls = [
'Startup_additional_industries',
'Startup_recommendation_tags',
'StartupLabel_startups',
'RefundCode_programs',
'UserLabel_users',
'Observer_newsletter_cc_roles',
'ExpertProfile_functional_expertise',
'ExpertProfile_additional_industries',
'ExpertProfile_mentoring_specialties',
'Newsletter_recipient_roles',
'Section_interest_categories',
]
class GeneralViewSet(LoggingMixin, viewsets.ModelViewSet):
permission_classes = (
permissions.IsAuthenticated,
DynamicModelPermissions,
)
@property
def model(self):
if self.kwargs['model'] in model_attribute_calls:
return apps.get_model(
app_label=self.kwargs['app'],
model_name=self.kwargs['model'])
else:
return apps.get_model(
app_label=self.kwargs['app'],
model_name=snake_to_camel_case(self.kwargs['model']))
def get_queryset(self):
return self.model.objects.all()
def get_serializer_class(self):
GeneralSerializer.Meta.model = self.model
return GeneralSerializer
|
81b5c5c29747d7f8622828c0036504a4a5023794
|
parse-demo.py
|
parse-demo.py
|
#!/usr/bin/python3
"""POS-tag a text file and print its chunked noun-phrase parse trees."""
import sys, os

import nltk

if len(sys.argv) < 2:
    print("Please supply a filename.")
    sys.exit(1)
filename = sys.argv[1]

with open(filename, 'r') as f:
    data = f.read()

# Break the input down into sentences, then into words, and position tag
# those words.
sentences = [nltk.pos_tag(nltk.word_tokenize(sentence))
             for sentence in nltk.sent_tokenize(data)]

# Define a grammar, and identify the noun phrases in the sentences.
# NP: optional determiner, any number of adjectives, then a noun.
chunk_parser = nltk.RegexpParser(r"NP: {<DT>?<JJ>*<NN>}")
trees = [chunk_parser.parse(sentence) for sentence in sentences]

for tree in trees:
    print(tree)
|
#!/usr/bin/python3
"""POS-tag a text file and print the noun phrases found in each sentence."""
import sys, os

import nltk

if len(sys.argv) < 2:
    print("Please supply a filename.")
    sys.exit(1)
filename = sys.argv[1]

with open(filename, 'r') as f:
    data = f.read()

# Break the input down into sentences, then into words, and position tag
# those words.
raw_sentences = nltk.sent_tokenize(data)
sentences = [nltk.pos_tag(nltk.word_tokenize(sentence))
             for sentence in raw_sentences]

# Define a grammar, and identify the noun phrases in the sentences.
# NP: optional determiner or possessive pronoun, any number of adjectives,
# then one or more (proper) nouns -- or a bare personal pronoun.
chunk_parser = nltk.RegexpParser(r"NP: {((<DT>|<PRP\$>)?<JJ>*(<NN>|<NNP>)+|<PRP>)}")
trees = [chunk_parser.parse(sentence) for sentence in sentences]

for index, tree in enumerate(trees):
    # Show the (newline-collapsed) sentence, then each NP subtree.
    print("===\nSentence: %s\nNoun phrases:" %
          raw_sentences[index].replace('\n', ' '))
    for subtree in tree.subtrees(filter=lambda t: t.label() == 'NP'):
        print(" %s" % subtree)
|
Develop the Regex grammar slightly further
|
Develop the Regex grammar slightly further
|
Python
|
mit
|
alexander-bauer/syllabus-summary
|
#!/usr/bin/python3
import sys, os
import nltk
if len(sys.argv) < 2:
print("Please supply a filename.")
sys.exit(1)
filename = sys.argv[1]
with open(filename, 'r') as f:
data = f.read()
# Break the input down into sentences, then into words, and position tag
# those words.
sentences = [nltk.pos_tag(nltk.word_tokenize(sentence)) \
for sentence in nltk.sent_tokenize(data)]
# Define a grammar, and identify the noun phrases in the sentences.
chunk_parser = nltk.RegexpParser(r"NP: {<DT>?<JJ>*<NN>}")
trees = [chunk_parser.parse(sentence) for sentence in sentences]
for tree in trees:
print(tree)
#for subtree in tree.subtrees(filter = lambda t: t.label() == 'NP'):
#print(subtree)
Develop the Regex grammar slightly further
|
#!/usr/bin/python3
import sys, os
import nltk
if len(sys.argv) < 2:
print("Please supply a filename.")
sys.exit(1)
filename = sys.argv[1]
with open(filename, 'r') as f:
data = f.read()
# Break the input down into sentences, then into words, and position tag
# those words.
raw_sentences = nltk.sent_tokenize(data)
sentences = [nltk.pos_tag(nltk.word_tokenize(sentence)) \
for sentence in raw_sentences]
# Define a grammar, and identify the noun phrases in the sentences.
chunk_parser = nltk.RegexpParser(r"NP: {((<DT>|<PRP\$>)?<JJ>*(<NN>|<NNP>)+|<PRP>)}")
trees = [chunk_parser.parse(sentence) for sentence in sentences]
for index, tree in enumerate(trees):
print("===\nSentence: %s\nNoun phrases:" %
raw_sentences[index].replace('\n', ' '))
for subtree in tree.subtrees(filter = lambda t: t.label() == 'NP'):
print(" %s" % subtree)
|
<commit_before>#!/usr/bin/python3
import sys, os
import nltk
if len(sys.argv) < 2:
print("Please supply a filename.")
sys.exit(1)
filename = sys.argv[1]
with open(filename, 'r') as f:
data = f.read()
# Break the input down into sentences, then into words, and position tag
# those words.
sentences = [nltk.pos_tag(nltk.word_tokenize(sentence)) \
for sentence in nltk.sent_tokenize(data)]
# Define a grammar, and identify the noun phrases in the sentences.
chunk_parser = nltk.RegexpParser(r"NP: {<DT>?<JJ>*<NN>}")
trees = [chunk_parser.parse(sentence) for sentence in sentences]
for tree in trees:
print(tree)
#for subtree in tree.subtrees(filter = lambda t: t.label() == 'NP'):
#print(subtree)
<commit_msg>Develop the Regex grammar slightly further<commit_after>
|
#!/usr/bin/python3
import sys, os
import nltk
if len(sys.argv) < 2:
print("Please supply a filename.")
sys.exit(1)
filename = sys.argv[1]
with open(filename, 'r') as f:
data = f.read()
# Break the input down into sentences, then into words, and position tag
# those words.
raw_sentences = nltk.sent_tokenize(data)
sentences = [nltk.pos_tag(nltk.word_tokenize(sentence)) \
for sentence in raw_sentences]
# Define a grammar, and identify the noun phrases in the sentences.
chunk_parser = nltk.RegexpParser(r"NP: {((<DT>|<PRP\$>)?<JJ>*(<NN>|<NNP>)+|<PRP>)}")
trees = [chunk_parser.parse(sentence) for sentence in sentences]
for index, tree in enumerate(trees):
print("===\nSentence: %s\nNoun phrases:" %
raw_sentences[index].replace('\n', ' '))
for subtree in tree.subtrees(filter = lambda t: t.label() == 'NP'):
print(" %s" % subtree)
|
#!/usr/bin/python3
import sys, os
import nltk
if len(sys.argv) < 2:
print("Please supply a filename.")
sys.exit(1)
filename = sys.argv[1]
with open(filename, 'r') as f:
data = f.read()
# Break the input down into sentences, then into words, and position tag
# those words.
sentences = [nltk.pos_tag(nltk.word_tokenize(sentence)) \
for sentence in nltk.sent_tokenize(data)]
# Define a grammar, and identify the noun phrases in the sentences.
chunk_parser = nltk.RegexpParser(r"NP: {<DT>?<JJ>*<NN>}")
trees = [chunk_parser.parse(sentence) for sentence in sentences]
for tree in trees:
print(tree)
#for subtree in tree.subtrees(filter = lambda t: t.label() == 'NP'):
#print(subtree)
Develop the Regex grammar slightly further#!/usr/bin/python3
import sys, os
import nltk
if len(sys.argv) < 2:
print("Please supply a filename.")
sys.exit(1)
filename = sys.argv[1]
with open(filename, 'r') as f:
data = f.read()
# Break the input down into sentences, then into words, and position tag
# those words.
raw_sentences = nltk.sent_tokenize(data)
sentences = [nltk.pos_tag(nltk.word_tokenize(sentence)) \
for sentence in raw_sentences]
# Define a grammar, and identify the noun phrases in the sentences.
chunk_parser = nltk.RegexpParser(r"NP: {((<DT>|<PRP\$>)?<JJ>*(<NN>|<NNP>)+|<PRP>)}")
trees = [chunk_parser.parse(sentence) for sentence in sentences]
for index, tree in enumerate(trees):
print("===\nSentence: %s\nNoun phrases:" %
raw_sentences[index].replace('\n', ' '))
for subtree in tree.subtrees(filter = lambda t: t.label() == 'NP'):
print(" %s" % subtree)
|
<commit_before>#!/usr/bin/python3
import sys, os
import nltk
if len(sys.argv) < 2:
print("Please supply a filename.")
sys.exit(1)
filename = sys.argv[1]
with open(filename, 'r') as f:
data = f.read()
# Break the input down into sentences, then into words, and position tag
# those words.
sentences = [nltk.pos_tag(nltk.word_tokenize(sentence)) \
for sentence in nltk.sent_tokenize(data)]
# Define a grammar, and identify the noun phrases in the sentences.
chunk_parser = nltk.RegexpParser(r"NP: {<DT>?<JJ>*<NN>}")
trees = [chunk_parser.parse(sentence) for sentence in sentences]
for tree in trees:
print(tree)
#for subtree in tree.subtrees(filter = lambda t: t.label() == 'NP'):
#print(subtree)
<commit_msg>Develop the Regex grammar slightly further<commit_after>#!/usr/bin/python3
import sys, os
import nltk
if len(sys.argv) < 2:
print("Please supply a filename.")
sys.exit(1)
filename = sys.argv[1]
with open(filename, 'r') as f:
data = f.read()
# Break the input down into sentences, then into words, and position tag
# those words.
raw_sentences = nltk.sent_tokenize(data)
sentences = [nltk.pos_tag(nltk.word_tokenize(sentence)) \
for sentence in raw_sentences]
# Define a grammar, and identify the noun phrases in the sentences.
chunk_parser = nltk.RegexpParser(r"NP: {((<DT>|<PRP\$>)?<JJ>*(<NN>|<NNP>)+|<PRP>)}")
trees = [chunk_parser.parse(sentence) for sentence in sentences]
for index, tree in enumerate(trees):
print("===\nSentence: %s\nNoun phrases:" %
raw_sentences[index].replace('\n', ' '))
for subtree in tree.subtrees(filter = lambda t: t.label() == 'NP'):
print(" %s" % subtree)
|
ac14efc0a8facbfe2fe7288734c86b27eb9b2770
|
openprocurement/tender/openeu/adapters.py
|
openprocurement/tender/openeu/adapters.py
|
# -*- coding: utf-8 -*-
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openeu.models import Tender
from openprocurement.tender.openua.constants import (
    TENDERING_EXTRA_PERIOD
)
from openprocurement.tender.openeu.constants import (
    TENDERING_DURATION, PREQUALIFICATION_COMPLAINT_STAND_STILL
)


class TenderAboveThresholdEUConfigurator(TenderConfigurator):
    """ AboveThresholdEU Tender configuration adapter """

    name = "AboveThresholdEU Tender configurator"
    model = Tender

    # duration of tendering period. timedelta object.
    tendering_period_duration = TENDERING_DURATION

    # duration of tender period extension. timedelta object
    tendering_period_extra = TENDERING_EXTRA_PERIOD

    # duration of pre-qualification stand-still period. timedelta object.
    prequalification_complaint_stand_still = PREQUALIFICATION_COMPLAINT_STAND_STILL

    # Complaint-blocking statuses are delegated to the model so the
    # adapter stays in sync with it.
    block_tender_complaint_status = model.block_tender_complaint_status
    block_complaint_status = model.block_complaint_status
|
# -*- coding: utf-8 -*-
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openeu.models import Tender
from openprocurement.tender.openua.constants import (
    TENDERING_EXTRA_PERIOD, STATUS4ROLE
)
from openprocurement.tender.openeu.constants import (
    TENDERING_DURATION, PREQUALIFICATION_COMPLAINT_STAND_STILL
)


class TenderAboveThresholdEUConfigurator(TenderConfigurator):
    """ AboveThresholdEU Tender configuration adapter """

    name = "AboveThresholdEU Tender configurator"
    model = Tender

    # duration of tendering period. timedelta object.
    tendering_period_duration = TENDERING_DURATION

    # duration of tender period extension. timedelta object
    tendering_period_extra = TENDERING_EXTRA_PERIOD

    # duration of pre-qualification stand-still period. timedelta object.
    prequalification_complaint_stand_still = PREQUALIFICATION_COMPLAINT_STAND_STILL

    # Complaint-blocking statuses are delegated to the model so the
    # adapter stays in sync with it.
    block_tender_complaint_status = model.block_tender_complaint_status
    block_complaint_status = model.block_complaint_status

    # Dictionary with allowed complaint statuses for operations for each role
    allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
|
Add constant for complaint documents
|
Add constant for complaint documents
|
Python
|
apache-2.0
|
openprocurement/openprocurement.tender.openeu
|
# -*- coding: utf-8 -*-
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openeu.models import Tender
from openprocurement.tender.openua.constants import (
TENDERING_EXTRA_PERIOD
)
from openprocurement.tender.openeu.constants import (
TENDERING_DURATION, PREQUALIFICATION_COMPLAINT_STAND_STILL
)
class TenderAboveThresholdEUConfigurator(TenderConfigurator):
""" AboveThresholdEU Tender configuration adapter """
name = "AboveThresholdEU Tender configurator"
model = Tender
# duration of tendering period. timedelta object.
tendering_period_duration = TENDERING_DURATION
# duration of tender period extension. timedelta object
tendering_period_extra = TENDERING_EXTRA_PERIOD
# duration of pre-qualification stand-still period. timedelta object.
prequalification_complaint_stand_still = PREQUALIFICATION_COMPLAINT_STAND_STILL
block_tender_complaint_status = model.block_tender_complaint_status
block_complaint_status = model.block_complaint_status
Add constant for complaint documents
|
# -*- coding: utf-8 -*-
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openeu.models import Tender
from openprocurement.tender.openua.constants import (
TENDERING_EXTRA_PERIOD, STATUS4ROLE
)
from openprocurement.tender.openeu.constants import (
TENDERING_DURATION, PREQUALIFICATION_COMPLAINT_STAND_STILL
)
class TenderAboveThresholdEUConfigurator(TenderConfigurator):
""" AboveThresholdEU Tender configuration adapter """
name = "AboveThresholdEU Tender configurator"
model = Tender
# duration of tendering period. timedelta object.
tendering_period_duration = TENDERING_DURATION
# duration of tender period extension. timedelta object
tendering_period_extra = TENDERING_EXTRA_PERIOD
# duration of pre-qualification stand-still period. timedelta object.
prequalification_complaint_stand_still = PREQUALIFICATION_COMPLAINT_STAND_STILL
block_tender_complaint_status = model.block_tender_complaint_status
block_complaint_status = model.block_complaint_status
# Dictionary with allowed complaint statuses for operations for each role
allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
|
<commit_before># -*- coding: utf-8 -*-
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openeu.models import Tender
from openprocurement.tender.openua.constants import (
TENDERING_EXTRA_PERIOD
)
from openprocurement.tender.openeu.constants import (
TENDERING_DURATION, PREQUALIFICATION_COMPLAINT_STAND_STILL
)
class TenderAboveThresholdEUConfigurator(TenderConfigurator):
""" AboveThresholdEU Tender configuration adapter """
name = "AboveThresholdEU Tender configurator"
model = Tender
# duration of tendering period. timedelta object.
tendering_period_duration = TENDERING_DURATION
# duration of tender period extension. timedelta object
tendering_period_extra = TENDERING_EXTRA_PERIOD
# duration of pre-qualification stand-still period. timedelta object.
prequalification_complaint_stand_still = PREQUALIFICATION_COMPLAINT_STAND_STILL
block_tender_complaint_status = model.block_tender_complaint_status
block_complaint_status = model.block_complaint_status
<commit_msg>Add constant for complaint documents<commit_after>
|
# -*- coding: utf-8 -*-
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openeu.models import Tender
from openprocurement.tender.openua.constants import (
TENDERING_EXTRA_PERIOD, STATUS4ROLE
)
from openprocurement.tender.openeu.constants import (
TENDERING_DURATION, PREQUALIFICATION_COMPLAINT_STAND_STILL
)
class TenderAboveThresholdEUConfigurator(TenderConfigurator):
""" AboveThresholdEU Tender configuration adapter """
name = "AboveThresholdEU Tender configurator"
model = Tender
# duration of tendering period. timedelta object.
tendering_period_duration = TENDERING_DURATION
# duration of tender period extension. timedelta object
tendering_period_extra = TENDERING_EXTRA_PERIOD
# duration of pre-qualification stand-still period. timedelta object.
prequalification_complaint_stand_still = PREQUALIFICATION_COMPLAINT_STAND_STILL
block_tender_complaint_status = model.block_tender_complaint_status
block_complaint_status = model.block_complaint_status
# Dictionary with allowed complaint statuses for operations for each role
allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
|
# -*- coding: utf-8 -*-
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openeu.models import Tender
from openprocurement.tender.openua.constants import (
TENDERING_EXTRA_PERIOD
)
from openprocurement.tender.openeu.constants import (
TENDERING_DURATION, PREQUALIFICATION_COMPLAINT_STAND_STILL
)
class TenderAboveThresholdEUConfigurator(TenderConfigurator):
""" AboveThresholdEU Tender configuration adapter """
name = "AboveThresholdEU Tender configurator"
model = Tender
# duration of tendering period. timedelta object.
tendering_period_duration = TENDERING_DURATION
# duration of tender period extension. timedelta object
tendering_period_extra = TENDERING_EXTRA_PERIOD
# duration of pre-qualification stand-still period. timedelta object.
prequalification_complaint_stand_still = PREQUALIFICATION_COMPLAINT_STAND_STILL
block_tender_complaint_status = model.block_tender_complaint_status
block_complaint_status = model.block_complaint_status
Add constant for complaint documents# -*- coding: utf-8 -*-
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openeu.models import Tender
from openprocurement.tender.openua.constants import (
TENDERING_EXTRA_PERIOD, STATUS4ROLE
)
from openprocurement.tender.openeu.constants import (
TENDERING_DURATION, PREQUALIFICATION_COMPLAINT_STAND_STILL
)
class TenderAboveThresholdEUConfigurator(TenderConfigurator):
""" AboveThresholdEU Tender configuration adapter """
name = "AboveThresholdEU Tender configurator"
model = Tender
# duration of tendering period. timedelta object.
tendering_period_duration = TENDERING_DURATION
# duration of tender period extension. timedelta object
tendering_period_extra = TENDERING_EXTRA_PERIOD
# duration of pre-qualification stand-still period. timedelta object.
prequalification_complaint_stand_still = PREQUALIFICATION_COMPLAINT_STAND_STILL
block_tender_complaint_status = model.block_tender_complaint_status
block_complaint_status = model.block_complaint_status
# Dictionary with allowed complaint statuses for operations for each role
allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
|
<commit_before># -*- coding: utf-8 -*-
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openeu.models import Tender
from openprocurement.tender.openua.constants import (
TENDERING_EXTRA_PERIOD
)
from openprocurement.tender.openeu.constants import (
TENDERING_DURATION, PREQUALIFICATION_COMPLAINT_STAND_STILL
)
class TenderAboveThresholdEUConfigurator(TenderConfigurator):
""" AboveThresholdEU Tender configuration adapter """
name = "AboveThresholdEU Tender configurator"
model = Tender
# duration of tendering period. timedelta object.
tendering_period_duration = TENDERING_DURATION
# duration of tender period extension. timedelta object
tendering_period_extra = TENDERING_EXTRA_PERIOD
# duration of pre-qualification stand-still period. timedelta object.
prequalification_complaint_stand_still = PREQUALIFICATION_COMPLAINT_STAND_STILL
block_tender_complaint_status = model.block_tender_complaint_status
block_complaint_status = model.block_complaint_status
<commit_msg>Add constant for complaint documents<commit_after># -*- coding: utf-8 -*-
from openprocurement.tender.core.adapters import TenderConfigurator
from openprocurement.tender.openeu.models import Tender
from openprocurement.tender.openua.constants import (
TENDERING_EXTRA_PERIOD, STATUS4ROLE
)
from openprocurement.tender.openeu.constants import (
TENDERING_DURATION, PREQUALIFICATION_COMPLAINT_STAND_STILL
)
class TenderAboveThresholdEUConfigurator(TenderConfigurator):
""" AboveThresholdEU Tender configuration adapter """
name = "AboveThresholdEU Tender configurator"
model = Tender
# duration of tendering period. timedelta object.
tendering_period_duration = TENDERING_DURATION
# duration of tender period extension. timedelta object
tendering_period_extra = TENDERING_EXTRA_PERIOD
# duration of pre-qualification stand-still period. timedelta object.
prequalification_complaint_stand_still = PREQUALIFICATION_COMPLAINT_STAND_STILL
block_tender_complaint_status = model.block_tender_complaint_status
block_complaint_status = model.block_complaint_status
# Dictionary with allowed complaint statuses for operations for each role
allowed_statuses_for_complaint_operations_for_roles = STATUS4ROLE
|
7bdcc30612636d2c27ea01a7d14b1839696fa7a0
|
newsman/watchdog/clean_process.py
|
newsman/watchdog/clean_process.py
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-

"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013

import sys
reload(sys)  # Python 2 idiom: re-expose setdefaultencoding below
sys.setdefaultencoding('UTF-8')

import subprocess


def clean():
    """
    kill zombie processes if there is any
    """
    # SIGHUP the parent of every process in Z (zombie) state so the parent
    # reaps it.  shell=True is needed for the backtick pipeline; the command
    # is a fixed string, so no untrusted input is interpolated.
    command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
    subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)


if __name__ == '__main__':
    clean()
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-

"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013

import sys
reload(sys)  # Python 2 idiom: re-expose setdefaultencoding below
sys.setdefaultencoding('UTF-8')

import subprocess


def clean():
    """
    kill zombie processes if there is any
    """
    # SIGHUP the parent of every process in Z (zombie) state so the parent
    # reaps it.  shell=True is needed for the backtick pipeline; the command
    # is a fixed string, so no untrusted input is interpolated.
    command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
    subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
    # Additionally force-kill any defunct python interpreters that survived
    # the HUP above (grep p[y]thon avoids matching the grep process itself).
    command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
    subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)


if __name__ == '__main__':
    clean()
|
Add code to remove defunct python processes
|
Add code to remove defunct python processes
|
Python
|
agpl-3.0
|
chengdujin/newsman,chengdujin/newsman,chengdujin/newsman
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
Add code to remove defunct python processes
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
|
<commit_before>#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
<commit_msg>Add code to remove defunct python processes<commit_after>
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
Add code to remove defunct python processes#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
|
<commit_before>#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
<commit_msg>Add code to remove defunct python processes<commit_after>#!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
clean zombie processes
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Aug. 22, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import subprocess
def clean():
"""
kill zombie processes if there is any
"""
command = "kill -HUP `ps -A -ostat,ppid | grep -e '^[Zz]' | awk '{print $2}'`"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
command = "ps -xal | grep p[y]thon | grep '<defunct>' | awk '{print $4}' | xargs kill -9"
subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
if __name__ == '__main__':
clean()
|
b5e7cb7946d87fa39c2a006808a0c07975f9c4d4
|
endymion/box.py
|
endymion/box.py
|
from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
def versions(self):
"""Return a tuple with all available box versions."""
return tuple(v['version'] for v in self._data['versions']
if v['status'] == 'active')
def providers(self, version):
"""Return a list of providers for a specific box version."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return [p['name'] for p in _ver['providers']]
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return ([p for p in _ver['providers']
if p['name'] == provider])[0]['url']
|
from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
# We need to preserve the order of the versions
self._versions = tuple(v['version'] for v in self._data['versions'])
# Prepare a data structure for quick lookups
self._boxes = {}
for v in self._data['versions']:
_version = v['version']
self._boxes[_version] = {}
for p in v['providers']:
_provider = p['name']
self._boxes[_version][_provider] = {}
self._boxes[_version][_provider]['url'] = p['url']
def versions(self):
"""Return a tuple with all available box versions."""
return self._versions
def providers(self, version):
"""Return a list of providers for a specific box version."""
return self._boxes[version].keys()
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
return self._boxes[version][provider]['url']
|
Index the Atlas data by version and provider
|
Index the Atlas data by version and provider
|
Python
|
mit
|
lpancescu/atlas-lint
|
from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
def versions(self):
"""Return a tuple with all available box versions."""
return tuple(v['version'] for v in self._data['versions']
if v['status'] == 'active')
def providers(self, version):
"""Return a list of providers for a specific box version."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return [p['name'] for p in _ver['providers']]
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return ([p for p in _ver['providers']
if p['name'] == provider])[0]['url']
Index the Atlas data by version and provider
|
from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
# We need to preserve the order of the versions
self._versions = tuple(v['version'] for v in self._data['versions'])
# Prepare a data structure for quick lookups
self._boxes = {}
for v in self._data['versions']:
_version = v['version']
self._boxes[_version] = {}
for p in v['providers']:
_provider = p['name']
self._boxes[_version][_provider] = {}
self._boxes[_version][_provider]['url'] = p['url']
def versions(self):
"""Return a tuple with all available box versions."""
return self._versions
def providers(self, version):
"""Return a list of providers for a specific box version."""
return self._boxes[version].keys()
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
return self._boxes[version][provider]['url']
|
<commit_before>from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
def versions(self):
"""Return a tuple with all available box versions."""
return tuple(v['version'] for v in self._data['versions']
if v['status'] == 'active')
def providers(self, version):
"""Return a list of providers for a specific box version."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return [p['name'] for p in _ver['providers']]
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return ([p for p in _ver['providers']
if p['name'] == provider])[0]['url']
<commit_msg>Index the Atlas data by version and provider<commit_after>
|
from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
# We need to preserve the order of the versions
self._versions = tuple(v['version'] for v in self._data['versions'])
# Prepare a data structure for quick lookups
self._boxes = {}
for v in self._data['versions']:
_version = v['version']
self._boxes[_version] = {}
for p in v['providers']:
_provider = p['name']
self._boxes[_version][_provider] = {}
self._boxes[_version][_provider]['url'] = p['url']
def versions(self):
"""Return a tuple with all available box versions."""
return self._versions
def providers(self, version):
"""Return a list of providers for a specific box version."""
return self._boxes[version].keys()
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
return self._boxes[version][provider]['url']
|
from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
def versions(self):
"""Return a tuple with all available box versions."""
return tuple(v['version'] for v in self._data['versions']
if v['status'] == 'active')
def providers(self, version):
"""Return a list of providers for a specific box version."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return [p['name'] for p in _ver['providers']]
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return ([p for p in _ver['providers']
if p['name'] == provider])[0]['url']
Index the Atlas data by version and providerfrom __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
# We need to preserve the order of the versions
self._versions = tuple(v['version'] for v in self._data['versions'])
# Prepare a data structure for quick lookups
self._boxes = {}
for v in self._data['versions']:
_version = v['version']
self._boxes[_version] = {}
for p in v['providers']:
_provider = p['name']
self._boxes[_version][_provider] = {}
self._boxes[_version][_provider]['url'] = p['url']
def versions(self):
"""Return a tuple with all available box versions."""
return self._versions
def providers(self, version):
"""Return a list of providers for a specific box version."""
return self._boxes[version].keys()
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
return self._boxes[version][provider]['url']
|
<commit_before>from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
def versions(self):
"""Return a tuple with all available box versions."""
return tuple(v['version'] for v in self._data['versions']
if v['status'] == 'active')
def providers(self, version):
"""Return a list of providers for a specific box version."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return [p['name'] for p in _ver['providers']]
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
_ver = ([v for v in self._data['versions']
if v['version'] == version])[0]
return ([p for p in _ver['providers']
if p['name'] == provider])[0]['url']
<commit_msg>Index the Atlas data by version and provider<commit_after>from __future__ import unicode_literals, print_function
import json
import urllib2
class Box(object):
"""Downloads and parses metainformation about a Vagrant box"""
def __init__(self, publisher, name):
"""Extract metainformation for a Vagrant box.
publisher -- Atlas owner
name -- Vagrant box name
"""
json_url = ("https://atlas.hashicorp.com/{0}/boxes/{1}/"
.format(publisher, name))
request = urllib2.Request(json_url, None,
{'Accept': 'application/json'})
json_file = urllib2.urlopen(request)
self._data = json.loads(json_file.read())
# We need to preserve the order of the versions
self._versions = tuple(v['version'] for v in self._data['versions'])
# Prepare a data structure for quick lookups
self._boxes = {}
for v in self._data['versions']:
_version = v['version']
self._boxes[_version] = {}
for p in v['providers']:
_provider = p['name']
self._boxes[_version][_provider] = {}
self._boxes[_version][_provider]['url'] = p['url']
def versions(self):
"""Return a tuple with all available box versions."""
return self._versions
def providers(self, version):
"""Return a list of providers for a specific box version."""
return self._boxes[version].keys()
def url(self, version, provider):
"""Return the download URL for a specific box version and provider."""
return self._boxes[version][provider]['url']
|
9a0f7e8b9da174008b33dd1d757b2e186b70e9f4
|
examples/network_correlations.py
|
examples/network_correlations.py
|
"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
|
"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network")
for i, network in enumerate(networks):
if i and network != networks[i - 1]:
ax.axhline(len(networks) - i, c="w")
ax.axvline(i, c="w")
f.tight_layout()
|
Improve networks heatmap gallery example
|
Improve networks heatmap gallery example
|
Python
|
bsd-3-clause
|
lukauskas/seaborn,mwaskom/seaborn,phobson/seaborn,parantapa/seaborn,drewokane/seaborn,cwu2011/seaborn,aashish24/seaborn,arokem/seaborn,bsipocz/seaborn,gef756/seaborn,mia1rab/seaborn,dimarkov/seaborn,muku42/seaborn,ebothmann/seaborn,petebachant/seaborn,anntzer/seaborn,lukauskas/seaborn,JWarmenhoven/seaborn,phobson/seaborn,Guokr1991/seaborn,mclevey/seaborn,olgabot/seaborn,jakevdp/seaborn,wrobstory/seaborn,anntzer/seaborn,clarkfitzg/seaborn,ashhher3/seaborn,lypzln/seaborn,sinhrks/seaborn,oesteban/seaborn,jat255/seaborn,Lx37/seaborn,sauliusl/seaborn,ischwabacher/seaborn,mwaskom/seaborn,arokem/seaborn,nileracecrew/seaborn,huongttlan/seaborn,q1ang/seaborn,tim777z/seaborn,aashish24/seaborn,dhimmel/seaborn,dotsdl/seaborn,kyleam/seaborn,uhjish/seaborn
|
"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
Improve networks heatmap gallery example
|
"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network")
for i, network in enumerate(networks):
if i and network != networks[i - 1]:
ax.axhline(len(networks) - i, c="w")
ax.axvline(i, c="w")
f.tight_layout()
|
<commit_before>"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
<commit_msg>Improve networks heatmap gallery example<commit_after>
|
"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network")
for i, network in enumerate(networks):
if i and network != networks[i - 1]:
ax.axhline(len(networks) - i, c="w")
ax.axvline(i, c="w")
f.tight_layout()
|
"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
Improve networks heatmap gallery example"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network")
for i, network in enumerate(networks):
if i and network != networks[i - 1]:
ax.axhline(len(networks) - i, c="w")
ax.axvline(i, c="w")
f.tight_layout()
|
<commit_before>"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network").astype(int).values
start, end = ax.get_ylim()
rect_kws = dict(facecolor="none", edgecolor=".2",
linewidth=1.5, capstyle="projecting")
for n in range(1, 18):
n_nodes = (networks == n).sum()
rect = plt.Rectangle((start, end), n_nodes, -n_nodes, **rect_kws)
start += n_nodes
end -= n_nodes
ax.add_artist(rect)
f.tight_layout()
<commit_msg>Improve networks heatmap gallery example<commit_after>"""
Cortical networks correlation matrix
====================================
"""
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(context="paper", font="monospace")
df = sns.load_dataset("brain_networks", header=[0, 1, 2], index_col=0)
corrmat = df.corr()
f, ax = plt.subplots(figsize=(12, 9))
sns.heatmap(corrmat, vmax=.8, linewidths=0, square=True)
networks = corrmat.columns.get_level_values("network")
for i, network in enumerate(networks):
if i and network != networks[i - 1]:
ax.axhline(len(networks) - i, c="w")
ax.axvline(i, c="w")
f.tight_layout()
|
393a2f5f0ccfedc1c5ebd7de987c870419ca2d89
|
scripts/calculate_lqr_gain.py
|
scripts/calculate_lqr_gain.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import scipy
import control
from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices
def compute_whipple_lqr_gain(velocity):
_, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity)
Q = np.diag([1e5, 1e3, 1e3, 1e2])
R = np.eye(2)
gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)]
return gains
if __name__ == '__main__':
import sys
v_low = 0 # m/s
if len(sys.argv) > 1:
v_high = int(sys.argv[1])
else:
v_high = 1 # m/s
velocities = [v_low, v_high]
gains = compute_whipple_lqr_gain(velocities)
for v, K in zip(velocities, gains):
print('computed LQR controller feedback gain for v = {}'.format(v))
print(-K)
print()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import scipy
import control
from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices
def compute_whipple_lqr_gain(velocity):
_, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity)
Q = np.diag([1e5, 1e3, 1e3, 1e2])
R = np.eye(2)
gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)]
return gains
if __name__ == '__main__':
import sys
v_low = 0 # m/s
if len(sys.argv) > 1:
v_high = int(sys.argv[1])
else:
v_high = 1 # m/s
velocities = [v_low, v_high]
gains = compute_whipple_lqr_gain(velocities)
for v, K in zip(velocities, gains):
print('computed LQR controller feedback gain for v = {}'.format(v))
K = -K
for r in range(K.shape[0]):
row = ', '.join(str(elem) for elem in K[r, :])
if r != K.shape[0] - 1:
row += ','
print(row)
print()
|
Change LQR gain element printing
|
Change LQR gain element printing
Change printing of LQR gain elements for easier copying.
|
Python
|
bsd-2-clause
|
oliverlee/phobos,oliverlee/phobos,oliverlee/phobos,oliverlee/phobos
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import scipy
import control
from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices
def compute_whipple_lqr_gain(velocity):
_, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity)
Q = np.diag([1e5, 1e3, 1e3, 1e2])
R = np.eye(2)
gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)]
return gains
if __name__ == '__main__':
import sys
v_low = 0 # m/s
if len(sys.argv) > 1:
v_high = int(sys.argv[1])
else:
v_high = 1 # m/s
velocities = [v_low, v_high]
gains = compute_whipple_lqr_gain(velocities)
for v, K in zip(velocities, gains):
print('computed LQR controller feedback gain for v = {}'.format(v))
print(-K)
print()
Change LQR gain element printing
Change printing of LQR gain elements for easier copying.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import scipy
import control
from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices
def compute_whipple_lqr_gain(velocity):
_, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity)
Q = np.diag([1e5, 1e3, 1e3, 1e2])
R = np.eye(2)
gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)]
return gains
if __name__ == '__main__':
import sys
v_low = 0 # m/s
if len(sys.argv) > 1:
v_high = int(sys.argv[1])
else:
v_high = 1 # m/s
velocities = [v_low, v_high]
gains = compute_whipple_lqr_gain(velocities)
for v, K in zip(velocities, gains):
print('computed LQR controller feedback gain for v = {}'.format(v))
K = -K
for r in range(K.shape[0]):
row = ', '.join(str(elem) for elem in K[r, :])
if r != K.shape[0] - 1:
row += ','
print(row)
print()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import scipy
import control
from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices
def compute_whipple_lqr_gain(velocity):
_, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity)
Q = np.diag([1e5, 1e3, 1e3, 1e2])
R = np.eye(2)
gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)]
return gains
if __name__ == '__main__':
import sys
v_low = 0 # m/s
if len(sys.argv) > 1:
v_high = int(sys.argv[1])
else:
v_high = 1 # m/s
velocities = [v_low, v_high]
gains = compute_whipple_lqr_gain(velocities)
for v, K in zip(velocities, gains):
print('computed LQR controller feedback gain for v = {}'.format(v))
print(-K)
print()
<commit_msg>Change LQR gain element printing
Change printing of LQR gain elements for easier copying.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import scipy
import control
from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices
def compute_whipple_lqr_gain(velocity):
_, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity)
Q = np.diag([1e5, 1e3, 1e3, 1e2])
R = np.eye(2)
gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)]
return gains
if __name__ == '__main__':
import sys
v_low = 0 # m/s
if len(sys.argv) > 1:
v_high = int(sys.argv[1])
else:
v_high = 1 # m/s
velocities = [v_low, v_high]
gains = compute_whipple_lqr_gain(velocities)
for v, K in zip(velocities, gains):
print('computed LQR controller feedback gain for v = {}'.format(v))
K = -K
for r in range(K.shape[0]):
row = ', '.join(str(elem) for elem in K[r, :])
if r != K.shape[0] - 1:
row += ','
print(row)
print()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import scipy
import control
from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices
def compute_whipple_lqr_gain(velocity):
_, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity)
Q = np.diag([1e5, 1e3, 1e3, 1e2])
R = np.eye(2)
gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)]
return gains
if __name__ == '__main__':
import sys
v_low = 0 # m/s
if len(sys.argv) > 1:
v_high = int(sys.argv[1])
else:
v_high = 1 # m/s
velocities = [v_low, v_high]
gains = compute_whipple_lqr_gain(velocities)
for v, K in zip(velocities, gains):
print('computed LQR controller feedback gain for v = {}'.format(v))
print(-K)
print()
Change LQR gain element printing
Change printing of LQR gain elements for easier copying.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import scipy
import control
from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices
def compute_whipple_lqr_gain(velocity):
_, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity)
Q = np.diag([1e5, 1e3, 1e3, 1e2])
R = np.eye(2)
gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)]
return gains
if __name__ == '__main__':
import sys
v_low = 0 # m/s
if len(sys.argv) > 1:
v_high = int(sys.argv[1])
else:
v_high = 1 # m/s
velocities = [v_low, v_high]
gains = compute_whipple_lqr_gain(velocities)
for v, K in zip(velocities, gains):
print('computed LQR controller feedback gain for v = {}'.format(v))
K = -K
for r in range(K.shape[0]):
row = ', '.join(str(elem) for elem in K[r, :])
if r != K.shape[0] - 1:
row += ','
print(row)
print()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import scipy
import control
from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices
def compute_whipple_lqr_gain(velocity):
_, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity)
Q = np.diag([1e5, 1e3, 1e3, 1e2])
R = np.eye(2)
gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)]
return gains
if __name__ == '__main__':
import sys
v_low = 0 # m/s
if len(sys.argv) > 1:
v_high = int(sys.argv[1])
else:
v_high = 1 # m/s
velocities = [v_low, v_high]
gains = compute_whipple_lqr_gain(velocities)
for v, K in zip(velocities, gains):
print('computed LQR controller feedback gain for v = {}'.format(v))
print(-K)
print()
<commit_msg>Change LQR gain element printing
Change printing of LQR gain elements for easier copying.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import scipy
import control
from dtk.bicycle import benchmark_state_space_vs_speed, benchmark_matrices
def compute_whipple_lqr_gain(velocity):
_, A, B = benchmark_state_space_vs_speed(*benchmark_matrices(), velocity)
Q = np.diag([1e5, 1e3, 1e3, 1e2])
R = np.eye(2)
gains = [control.lqr(Ai, Bi, Q, R)[0] for Ai, Bi in zip(A, B)]
return gains
if __name__ == '__main__':
import sys
v_low = 0 # m/s
if len(sys.argv) > 1:
v_high = int(sys.argv[1])
else:
v_high = 1 # m/s
velocities = [v_low, v_high]
gains = compute_whipple_lqr_gain(velocities)
for v, K in zip(velocities, gains):
print('computed LQR controller feedback gain for v = {}'.format(v))
K = -K
for r in range(K.shape[0]):
row = ', '.join(str(elem) for elem in K[r, :])
if r != K.shape[0] - 1:
row += ','
print(row)
print()
|
f2506c07caf66b3ad42f6f1c09325097edd2e169
|
src/django_healthchecks/contrib.py
|
src/django_healthchecks/contrib.py
|
import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
cursor = connection.cursor()
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
|
import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
with connection.cursor() as cursor:
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
|
Make sure the cursor is properly closed after usage
|
Make sure the cursor is properly closed after usage
|
Python
|
mit
|
mvantellingen/django-healthchecks
|
import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
cursor = connection.cursor()
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
Make sure the cursor is properly closed after usage
|
import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
with connection.cursor() as cursor:
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
|
<commit_before>import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
cursor = connection.cursor()
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
<commit_msg>Make sure the cursor is properly closed after usage<commit_after>
|
import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
with connection.cursor() as cursor:
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
|
import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
cursor = connection.cursor()
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
Make sure the cursor is properly closed after usageimport uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
with connection.cursor() as cursor:
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
|
<commit_before>import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
cursor = connection.cursor()
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
<commit_msg>Make sure the cursor is properly closed after usage<commit_after>import uuid
from django.core.cache import cache
from django.db import connection
def check_database():
"""Check if the application can perform a dummy sql query"""
with connection.cursor() as cursor:
cursor.execute('SELECT 1; -- Healthcheck')
row = cursor.fetchone()
return row[0] == 1
def check_cache_default():
"""Check if the application can connect to the default cached and
read/write some dummy data.
"""
dummy = str(uuid.uuid4())
key = 'healthcheck:%s' % dummy
cache.set(key, dummy, timeout=5)
cached_value = cache.get(key)
return cached_value == dummy
def check_dummy_true():
return True
def check_dummy_false():
return False
def check_remote_addr(request):
return request.META['REMOTE_ADDR']
|
667fad716121d5675d5c66d42213965399d9b13d
|
clowder_server/views.py
|
clowder_server/views.py
|
from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', 'admin@clowder.io',
['keith@parkme.com'], fail_silently=False)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by(name)
return context
|
from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', 'admin@clowder.io',
['keith@parkme.com'], fail_silently=False)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by('name')
return context
|
Add quotes to order by
|
Add quotes to order by
|
Python
|
agpl-3.0
|
keithhackbarth/clowder_server,keithhackbarth/clowder_server,framewr/clowder_server,framewr/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,framewr/clowder_server,framewr/clowder_server
|
from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', 'admin@clowder.io',
['keith@parkme.com'], fail_silently=False)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by(name)
return context
Add quotes to order by
|
from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', 'admin@clowder.io',
['keith@parkme.com'], fail_silently=False)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by('name')
return context
|
<commit_before>from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', 'admin@clowder.io',
['keith@parkme.com'], fail_silently=False)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by(name)
return context
<commit_msg>Add quotes to order by<commit_after>
|
from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', 'admin@clowder.io',
['keith@parkme.com'], fail_silently=False)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by('name')
return context
|
from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', 'admin@clowder.io',
['keith@parkme.com'], fail_silently=False)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by(name)
return context
Add quotes to order byfrom braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', 'admin@clowder.io',
['keith@parkme.com'], fail_silently=False)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by('name')
return context
|
<commit_before>from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', 'admin@clowder.io',
['keith@parkme.com'], fail_silently=False)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by(name)
return context
<commit_msg>Add quotes to order by<commit_after>from braces.views import CsrfExemptMixin
from django.core.mail import send_mail
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from clowder_server.models import Ping
class APIView(CsrfExemptMixin, View):
def post(self, request):
name = request.POST.get('name')
value = request.POST.get('value')
status = int(request.POST.get('status', 1))
if status == -1:
send_mail('Subject here', 'Here is the message.', 'admin@clowder.io',
['keith@parkme.com'], fail_silently=False)
Ping.objects.create(
name=name,
value=value,
)
return HttpResponse('ok')
class DashboardView(TemplateView):
template_name = "dashboard.html"
def get_context_data(self, **context):
context['pings'] = Ping.objects.all().order_by('name')
return context
|
82c86350f62aefa3b732d5273216b0cb937758db
|
site/api/migrations/0009_load_pos_fixture_data.py
|
site/api/migrations/0009_load_pos_fixture_data.py
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.core.management import call_command
app_name = 'api'
fixture = model_name = 'PartOfSpeech'
def load_fixture(apps, schema_editor):
call_command('loaddata', fixture, app_label=app_name)
def unload_fixture(apps, schema_editor):
"Deleting all entries for this model"
PartOfSpeech = apps.get_model(api, model_name)
PartOfSpeech.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
(app_name, '0008_posmention'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.core.management import call_command
app_name = 'api'
fixture = model_name = 'PartOfSpeech'
def load_fixture(apps, schema_editor):
call_command('loaddata', fixture, app_label=app_name)
def unload_fixture(apps, schema_editor):
"Deleting all entries for this model"
PartOfSpeech = apps.get_model(app_name, model_name)
PartOfSpeech.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
(app_name, '0008_posmention'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
|
Fix bug in unload_fixture method
|
Fix bug in unload_fixture method
|
Python
|
mit
|
LitPalimpsest/parser-api-search,LitPalimpsest/parser-api-search,LitPalimpsest/parser-api-search
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.core.management import call_command
app_name = 'api'
fixture = model_name = 'PartOfSpeech'
def load_fixture(apps, schema_editor):
call_command('loaddata', fixture, app_label=app_name)
def unload_fixture(apps, schema_editor):
"Deleting all entries for this model"
PartOfSpeech = apps.get_model(api, model_name)
PartOfSpeech.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
(app_name, '0008_posmention'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
Fix bug in unload_fixture method
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.core.management import call_command
app_name = 'api'
fixture = model_name = 'PartOfSpeech'
def load_fixture(apps, schema_editor):
call_command('loaddata', fixture, app_label=app_name)
def unload_fixture(apps, schema_editor):
"Deleting all entries for this model"
PartOfSpeech = apps.get_model(app_name, model_name)
PartOfSpeech.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
(app_name, '0008_posmention'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
|
<commit_before># -*- coding: utf-8 -*-
from django.db import models, migrations
from django.core.management import call_command
app_name = 'api'
fixture = model_name = 'PartOfSpeech'
def load_fixture(apps, schema_editor):
call_command('loaddata', fixture, app_label=app_name)
def unload_fixture(apps, schema_editor):
"Deleting all entries for this model"
PartOfSpeech = apps.get_model(api, model_name)
PartOfSpeech.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
(app_name, '0008_posmention'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
<commit_msg>Fix bug in unload_fixture method<commit_after>
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.core.management import call_command
app_name = 'api'
fixture = model_name = 'PartOfSpeech'
def load_fixture(apps, schema_editor):
call_command('loaddata', fixture, app_label=app_name)
def unload_fixture(apps, schema_editor):
"Deleting all entries for this model"
PartOfSpeech = apps.get_model(app_name, model_name)
PartOfSpeech.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
(app_name, '0008_posmention'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.core.management import call_command
app_name = 'api'
fixture = model_name = 'PartOfSpeech'
def load_fixture(apps, schema_editor):
call_command('loaddata', fixture, app_label=app_name)
def unload_fixture(apps, schema_editor):
"Deleting all entries for this model"
PartOfSpeech = apps.get_model(api, model_name)
PartOfSpeech.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
(app_name, '0008_posmention'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
Fix bug in unload_fixture method# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.core.management import call_command
app_name = 'api'
fixture = model_name = 'PartOfSpeech'
def load_fixture(apps, schema_editor):
call_command('loaddata', fixture, app_label=app_name)
def unload_fixture(apps, schema_editor):
"Deleting all entries for this model"
PartOfSpeech = apps.get_model(app_name, model_name)
PartOfSpeech.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
(app_name, '0008_posmention'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
|
<commit_before># -*- coding: utf-8 -*-
from django.db import models, migrations
from django.core.management import call_command
app_name = 'api'
fixture = model_name = 'PartOfSpeech'
def load_fixture(apps, schema_editor):
call_command('loaddata', fixture, app_label=app_name)
def unload_fixture(apps, schema_editor):
"Deleting all entries for this model"
PartOfSpeech = apps.get_model(api, model_name)
PartOfSpeech.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
(app_name, '0008_posmention'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
<commit_msg>Fix bug in unload_fixture method<commit_after># -*- coding: utf-8 -*-
from django.db import models, migrations
from django.core.management import call_command
app_name = 'api'
fixture = model_name = 'PartOfSpeech'
def load_fixture(apps, schema_editor):
call_command('loaddata', fixture, app_label=app_name)
def unload_fixture(apps, schema_editor):
"Deleting all entries for this model"
PartOfSpeech = apps.get_model(app_name, model_name)
PartOfSpeech.objects.all().delete()
class Migration(migrations.Migration):
dependencies = [
(app_name, '0008_posmention'),
]
operations = [
migrations.RunPython(load_fixture, reverse_code=unload_fixture),
]
|
4979be89083fcdee5a9d4328ce6c9ac64d853b0b
|
src/satosa/logging_util.py
|
src/satosa/logging_util.py
|
from uuid import uuid4
# The state key for saving the session id in the state
LOGGER_STATE_KEY = "SESSION_ID"
LOG_FMT = "[{id}] {message}"
def get_session_id(state):
session_id = (
"UNKNOWN"
if state is None
else state.get(LOGGER_STATE_KEY, uuid4().urn)
)
return session_id
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
session_id = get_session_id(state)
logline = LOG_FMT.format(id=session_id, message=message)
logger.log(level, logline, **kwargs)
|
from uuid import uuid4
# The state key for saving the session id in the state
LOGGER_STATE_KEY = "SESSION_ID"
LOG_FMT = "[{id}] {message}"
def get_session_id(state):
session_id = (
"UNKNOWN"
if state is None
else state.get(LOGGER_STATE_KEY, uuid4().urn)
)
return session_id
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
state[LOGGER_STATE_KEY] = session_id = get_session_id(state)
logline = LOG_FMT.format(id=session_id, message=message)
logger.log(level, logline, **kwargs)
|
Set the session-id in state
|
Set the session-id in state
This fixes the logger that kept getting a new uuid4 as a session-id for
each log call.
Signed-off-by: Ivan Kanakarakis <f60d6943d72436645c4304926eeeac2718a1142c@gmail.com>
|
Python
|
apache-2.0
|
SUNET/SATOSA,SUNET/SATOSA,its-dirg/SATOSA
|
from uuid import uuid4
# The state key for saving the session id in the state
LOGGER_STATE_KEY = "SESSION_ID"
LOG_FMT = "[{id}] {message}"
def get_session_id(state):
session_id = (
"UNKNOWN"
if state is None
else state.get(LOGGER_STATE_KEY, uuid4().urn)
)
return session_id
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
session_id = get_session_id(state)
logline = LOG_FMT.format(id=session_id, message=message)
logger.log(level, logline, **kwargs)
Set the session-id in state
This fixes the logger that kept getting a new uuid4 as a session-id for
each log call.
Signed-off-by: Ivan Kanakarakis <f60d6943d72436645c4304926eeeac2718a1142c@gmail.com>
|
from uuid import uuid4
# The state key for saving the session id in the state
LOGGER_STATE_KEY = "SESSION_ID"
LOG_FMT = "[{id}] {message}"
def get_session_id(state):
session_id = (
"UNKNOWN"
if state is None
else state.get(LOGGER_STATE_KEY, uuid4().urn)
)
return session_id
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
state[LOGGER_STATE_KEY] = session_id = get_session_id(state)
logline = LOG_FMT.format(id=session_id, message=message)
logger.log(level, logline, **kwargs)
|
<commit_before>from uuid import uuid4
# The state key for saving the session id in the state
LOGGER_STATE_KEY = "SESSION_ID"
LOG_FMT = "[{id}] {message}"
def get_session_id(state):
session_id = (
"UNKNOWN"
if state is None
else state.get(LOGGER_STATE_KEY, uuid4().urn)
)
return session_id
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
session_id = get_session_id(state)
logline = LOG_FMT.format(id=session_id, message=message)
logger.log(level, logline, **kwargs)
<commit_msg>Set the session-id in state
This fixes the logger that kept getting a new uuid4 as a session-id for
each log call.
Signed-off-by: Ivan Kanakarakis <f60d6943d72436645c4304926eeeac2718a1142c@gmail.com><commit_after>
|
from uuid import uuid4
# The state key for saving the session id in the state
LOGGER_STATE_KEY = "SESSION_ID"
LOG_FMT = "[{id}] {message}"
def get_session_id(state):
session_id = (
"UNKNOWN"
if state is None
else state.get(LOGGER_STATE_KEY, uuid4().urn)
)
return session_id
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
state[LOGGER_STATE_KEY] = session_id = get_session_id(state)
logline = LOG_FMT.format(id=session_id, message=message)
logger.log(level, logline, **kwargs)
|
from uuid import uuid4
# The state key for saving the session id in the state
LOGGER_STATE_KEY = "SESSION_ID"
LOG_FMT = "[{id}] {message}"
def get_session_id(state):
session_id = (
"UNKNOWN"
if state is None
else state.get(LOGGER_STATE_KEY, uuid4().urn)
)
return session_id
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
session_id = get_session_id(state)
logline = LOG_FMT.format(id=session_id, message=message)
logger.log(level, logline, **kwargs)
Set the session-id in state
This fixes the logger that kept getting a new uuid4 as a session-id for
each log call.
Signed-off-by: Ivan Kanakarakis <f60d6943d72436645c4304926eeeac2718a1142c@gmail.com>from uuid import uuid4
# The state key for saving the session id in the state
LOGGER_STATE_KEY = "SESSION_ID"
LOG_FMT = "[{id}] {message}"
def get_session_id(state):
session_id = (
"UNKNOWN"
if state is None
else state.get(LOGGER_STATE_KEY, uuid4().urn)
)
return session_id
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
state[LOGGER_STATE_KEY] = session_id = get_session_id(state)
logline = LOG_FMT.format(id=session_id, message=message)
logger.log(level, logline, **kwargs)
|
<commit_before>from uuid import uuid4
# The state key for saving the session id in the state
LOGGER_STATE_KEY = "SESSION_ID"
LOG_FMT = "[{id}] {message}"
def get_session_id(state):
session_id = (
"UNKNOWN"
if state is None
else state.get(LOGGER_STATE_KEY, uuid4().urn)
)
return session_id
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
session_id = get_session_id(state)
logline = LOG_FMT.format(id=session_id, message=message)
logger.log(level, logline, **kwargs)
<commit_msg>Set the session-id in state
This fixes the logger that kept getting a new uuid4 as a session-id for
each log call.
Signed-off-by: Ivan Kanakarakis <f60d6943d72436645c4304926eeeac2718a1142c@gmail.com><commit_after>from uuid import uuid4
# The state key for saving the session id in the state
LOGGER_STATE_KEY = "SESSION_ID"
LOG_FMT = "[{id}] {message}"
def get_session_id(state):
session_id = (
"UNKNOWN"
if state is None
else state.get(LOGGER_STATE_KEY, uuid4().urn)
)
return session_id
def satosa_logging(logger, level, message, state, **kwargs):
"""
Adds a session ID to the message.
:type logger: logging
:type level: int
:type message: str
:type state: satosa.state.State
:param logger: Logger to use
:param level: Logger level (ex: logging.DEBUG/logging.WARN/...)
:param message: Message
:param state: The current state
:param kwargs: set exc_info=True to get an exception stack trace in the log
"""
state[LOGGER_STATE_KEY] = session_id = get_session_id(state)
logline = LOG_FMT.format(id=session_id, message=message)
logger.log(level, logline, **kwargs)
|
734891812ded9339180dc59f0d6e500b3d415512
|
hc2002/plugin/symbolic_values.py
|
hc2002/plugin/symbolic_values.py
|
import hc2002.plugin as plugin
import hc2002.config as config
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_prefixes = ('availability-zone:', 'image:', 'kernel:', 'key:',
'load-balancers:', 'ramdisk:', 'security-groups:', 'spot-price:',
'subnet:')
def apply(instance):
def resolve_symbol(original_value):
value = original_value
visited = set()
while isinstance(value, basestring) \
and value.startswith(prefix):
value = value.format(region=config.region, **instance)
if value in instance \
and value not in visited:
visited.add(value)
value = instance[value]
else:
if original_value == value:
raise Exception("Unable to resolve '%s'" % value)
else:
raise Exception(
"While resolving '%s': unable to resolve '%s'"
% (original_value, value))
return value
# Resolve symbols
for prefix in _prefixes:
key = prefix[:-1]
if key not in instance:
continue
if isinstance(instance[key], basestring):
instance[key] = resolve_symbol(instance[key])
elif isinstance(instance[key], list):
instance[key] = map(resolve_symbol, instance[key])
# Drop resolvable symbols
for key in instance.keys():
if key.startswith(_prefixes):
del instance[key]
|
import hc2002.plugin as plugin
import hc2002.config as config
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_prefixes = ('availability-zone:', 'image:', 'kernel:', 'key:',
'load-balancers:', 'ramdisk:', 'security-groups:', 'spot-price:',
'subnet:', 'vpc:')
def apply(instance):
def resolve_symbol(original_value):
value = original_value
visited = set()
while isinstance(value, basestring) \
and value.startswith(prefix):
value = value.format(region=config.region, **instance)
if value in instance \
and value not in visited:
visited.add(value)
value = instance[value]
else:
if original_value == value:
raise Exception("Unable to resolve '%s'" % value)
else:
raise Exception(
"While resolving '%s': unable to resolve '%s'"
% (original_value, value))
return value
# Resolve symbols
for prefix in _prefixes:
key = prefix[:-1]
if key not in instance:
continue
if isinstance(instance[key], basestring):
instance[key] = resolve_symbol(instance[key])
elif isinstance(instance[key], list):
instance[key] = map(resolve_symbol, instance[key])
# Drop resolvable symbols
for key in instance.keys():
if key.startswith(_prefixes):
del instance[key]
|
Allow symbolic values for vpc
|
Allow symbolic values for vpc
|
Python
|
apache-2.0
|
biochimia/hc2000
|
import hc2002.plugin as plugin
import hc2002.config as config
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_prefixes = ('availability-zone:', 'image:', 'kernel:', 'key:',
'load-balancers:', 'ramdisk:', 'security-groups:', 'spot-price:',
'subnet:')
def apply(instance):
def resolve_symbol(original_value):
value = original_value
visited = set()
while isinstance(value, basestring) \
and value.startswith(prefix):
value = value.format(region=config.region, **instance)
if value in instance \
and value not in visited:
visited.add(value)
value = instance[value]
else:
if original_value == value:
raise Exception("Unable to resolve '%s'" % value)
else:
raise Exception(
"While resolving '%s': unable to resolve '%s'"
% (original_value, value))
return value
# Resolve symbols
for prefix in _prefixes:
key = prefix[:-1]
if key not in instance:
continue
if isinstance(instance[key], basestring):
instance[key] = resolve_symbol(instance[key])
elif isinstance(instance[key], list):
instance[key] = map(resolve_symbol, instance[key])
# Drop resolvable symbols
for key in instance.keys():
if key.startswith(_prefixes):
del instance[key]
Allow symbolic values for vpc
|
import hc2002.plugin as plugin
import hc2002.config as config
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_prefixes = ('availability-zone:', 'image:', 'kernel:', 'key:',
'load-balancers:', 'ramdisk:', 'security-groups:', 'spot-price:',
'subnet:', 'vpc:')
def apply(instance):
def resolve_symbol(original_value):
value = original_value
visited = set()
while isinstance(value, basestring) \
and value.startswith(prefix):
value = value.format(region=config.region, **instance)
if value in instance \
and value not in visited:
visited.add(value)
value = instance[value]
else:
if original_value == value:
raise Exception("Unable to resolve '%s'" % value)
else:
raise Exception(
"While resolving '%s': unable to resolve '%s'"
% (original_value, value))
return value
# Resolve symbols
for prefix in _prefixes:
key = prefix[:-1]
if key not in instance:
continue
if isinstance(instance[key], basestring):
instance[key] = resolve_symbol(instance[key])
elif isinstance(instance[key], list):
instance[key] = map(resolve_symbol, instance[key])
# Drop resolvable symbols
for key in instance.keys():
if key.startswith(_prefixes):
del instance[key]
|
<commit_before>import hc2002.plugin as plugin
import hc2002.config as config
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_prefixes = ('availability-zone:', 'image:', 'kernel:', 'key:',
'load-balancers:', 'ramdisk:', 'security-groups:', 'spot-price:',
'subnet:')
def apply(instance):
def resolve_symbol(original_value):
value = original_value
visited = set()
while isinstance(value, basestring) \
and value.startswith(prefix):
value = value.format(region=config.region, **instance)
if value in instance \
and value not in visited:
visited.add(value)
value = instance[value]
else:
if original_value == value:
raise Exception("Unable to resolve '%s'" % value)
else:
raise Exception(
"While resolving '%s': unable to resolve '%s'"
% (original_value, value))
return value
# Resolve symbols
for prefix in _prefixes:
key = prefix[:-1]
if key not in instance:
continue
if isinstance(instance[key], basestring):
instance[key] = resolve_symbol(instance[key])
elif isinstance(instance[key], list):
instance[key] = map(resolve_symbol, instance[key])
# Drop resolvable symbols
for key in instance.keys():
if key.startswith(_prefixes):
del instance[key]
<commit_msg>Allow symbolic values for vpc<commit_after>
|
import hc2002.plugin as plugin
import hc2002.config as config
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_prefixes = ('availability-zone:', 'image:', 'kernel:', 'key:',
'load-balancers:', 'ramdisk:', 'security-groups:', 'spot-price:',
'subnet:', 'vpc:')
def apply(instance):
def resolve_symbol(original_value):
value = original_value
visited = set()
while isinstance(value, basestring) \
and value.startswith(prefix):
value = value.format(region=config.region, **instance)
if value in instance \
and value not in visited:
visited.add(value)
value = instance[value]
else:
if original_value == value:
raise Exception("Unable to resolve '%s'" % value)
else:
raise Exception(
"While resolving '%s': unable to resolve '%s'"
% (original_value, value))
return value
# Resolve symbols
for prefix in _prefixes:
key = prefix[:-1]
if key not in instance:
continue
if isinstance(instance[key], basestring):
instance[key] = resolve_symbol(instance[key])
elif isinstance(instance[key], list):
instance[key] = map(resolve_symbol, instance[key])
# Drop resolvable symbols
for key in instance.keys():
if key.startswith(_prefixes):
del instance[key]
|
import hc2002.plugin as plugin
import hc2002.config as config
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_prefixes = ('availability-zone:', 'image:', 'kernel:', 'key:',
'load-balancers:', 'ramdisk:', 'security-groups:', 'spot-price:',
'subnet:')
def apply(instance):
def resolve_symbol(original_value):
value = original_value
visited = set()
while isinstance(value, basestring) \
and value.startswith(prefix):
value = value.format(region=config.region, **instance)
if value in instance \
and value not in visited:
visited.add(value)
value = instance[value]
else:
if original_value == value:
raise Exception("Unable to resolve '%s'" % value)
else:
raise Exception(
"While resolving '%s': unable to resolve '%s'"
% (original_value, value))
return value
# Resolve symbols
for prefix in _prefixes:
key = prefix[:-1]
if key not in instance:
continue
if isinstance(instance[key], basestring):
instance[key] = resolve_symbol(instance[key])
elif isinstance(instance[key], list):
instance[key] = map(resolve_symbol, instance[key])
# Drop resolvable symbols
for key in instance.keys():
if key.startswith(_prefixes):
del instance[key]
Allow symbolic values for vpcimport hc2002.plugin as plugin
import hc2002.config as config
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_prefixes = ('availability-zone:', 'image:', 'kernel:', 'key:',
'load-balancers:', 'ramdisk:', 'security-groups:', 'spot-price:',
'subnet:', 'vpc:')
def apply(instance):
def resolve_symbol(original_value):
value = original_value
visited = set()
while isinstance(value, basestring) \
and value.startswith(prefix):
value = value.format(region=config.region, **instance)
if value in instance \
and value not in visited:
visited.add(value)
value = instance[value]
else:
if original_value == value:
raise Exception("Unable to resolve '%s'" % value)
else:
raise Exception(
"While resolving '%s': unable to resolve '%s'"
% (original_value, value))
return value
# Resolve symbols
for prefix in _prefixes:
key = prefix[:-1]
if key not in instance:
continue
if isinstance(instance[key], basestring):
instance[key] = resolve_symbol(instance[key])
elif isinstance(instance[key], list):
instance[key] = map(resolve_symbol, instance[key])
# Drop resolvable symbols
for key in instance.keys():
if key.startswith(_prefixes):
del instance[key]
|
<commit_before>import hc2002.plugin as plugin
import hc2002.config as config
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_prefixes = ('availability-zone:', 'image:', 'kernel:', 'key:',
'load-balancers:', 'ramdisk:', 'security-groups:', 'spot-price:',
'subnet:')
def apply(instance):
def resolve_symbol(original_value):
value = original_value
visited = set()
while isinstance(value, basestring) \
and value.startswith(prefix):
value = value.format(region=config.region, **instance)
if value in instance \
and value not in visited:
visited.add(value)
value = instance[value]
else:
if original_value == value:
raise Exception("Unable to resolve '%s'" % value)
else:
raise Exception(
"While resolving '%s': unable to resolve '%s'"
% (original_value, value))
return value
# Resolve symbols
for prefix in _prefixes:
key = prefix[:-1]
if key not in instance:
continue
if isinstance(instance[key], basestring):
instance[key] = resolve_symbol(instance[key])
elif isinstance(instance[key], list):
instance[key] = map(resolve_symbol, instance[key])
# Drop resolvable symbols
for key in instance.keys():
if key.startswith(_prefixes):
del instance[key]
<commit_msg>Allow symbolic values for vpc<commit_after>import hc2002.plugin as plugin
import hc2002.config as config
plugin.register_for_resource(__name__, 'hc2002.resource.instance')
_prefixes = ('availability-zone:', 'image:', 'kernel:', 'key:',
'load-balancers:', 'ramdisk:', 'security-groups:', 'spot-price:',
'subnet:', 'vpc:')
def apply(instance):
def resolve_symbol(original_value):
value = original_value
visited = set()
while isinstance(value, basestring) \
and value.startswith(prefix):
value = value.format(region=config.region, **instance)
if value in instance \
and value not in visited:
visited.add(value)
value = instance[value]
else:
if original_value == value:
raise Exception("Unable to resolve '%s'" % value)
else:
raise Exception(
"While resolving '%s': unable to resolve '%s'"
% (original_value, value))
return value
# Resolve symbols
for prefix in _prefixes:
key = prefix[:-1]
if key not in instance:
continue
if isinstance(instance[key], basestring):
instance[key] = resolve_symbol(instance[key])
elif isinstance(instance[key], list):
instance[key] = map(resolve_symbol, instance[key])
# Drop resolvable symbols
for key in instance.keys():
if key.startswith(_prefixes):
del instance[key]
|
2518dbc9dde39b58bf2a6d33975960059f39e3b0
|
{{cookiecutter.repo_name}}/setup.py
|
{{cookiecutter.repo_name}}/setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.repo_name }}',
version="0.0.0",
url='https://github.com/{{ cookiecutter.github_user }}/{{ cookiecutter.app_name',
author="{{ cookiecutter.author_name }}",
author_email="{{ cookiecutter.email }}",
description="{{ cookiecutter.description }}",
long_description='\n\n'.join([
open('README.rst').read(),
open('CHANGELOG.rst').read(),
]),
keywords="django, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
]
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.repo_name }}',
version="0.0.0",
url='https://github.com/{{ cookiecutter.github_user }}/{{ cookiecutter.app_name }}',
author="{{ cookiecutter.author_name }}",
author_email="{{ cookiecutter.email }}",
description="{{ cookiecutter.description }}",
long_description='\n\n'.join([
open('README.rst').read(),
open('CHANGELOG.rst').read(),
]),
keywords="django, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
]
)
|
Fix issue with template rendering
|
Fix issue with template rendering
|
Python
|
bsd-3-clause
|
elbaschid/cc-django-app,elbaschid/cc-django-app
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.repo_name }}',
version="0.0.0",
url='https://github.com/{{ cookiecutter.github_user }}/{{ cookiecutter.app_name',
author="{{ cookiecutter.author_name }}",
author_email="{{ cookiecutter.email }}",
description="{{ cookiecutter.description }}",
long_description='\n\n'.join([
open('README.rst').read(),
open('CHANGELOG.rst').read(),
]),
keywords="django, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
]
)
Fix issue with template rendering
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.repo_name }}',
version="0.0.0",
url='https://github.com/{{ cookiecutter.github_user }}/{{ cookiecutter.app_name }}',
author="{{ cookiecutter.author_name }}",
author_email="{{ cookiecutter.email }}",
description="{{ cookiecutter.description }}",
long_description='\n\n'.join([
open('README.rst').read(),
open('CHANGELOG.rst').read(),
]),
keywords="django, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.repo_name }}',
version="0.0.0",
url='https://github.com/{{ cookiecutter.github_user }}/{{ cookiecutter.app_name',
author="{{ cookiecutter.author_name }}",
author_email="{{ cookiecutter.email }}",
description="{{ cookiecutter.description }}",
long_description='\n\n'.join([
open('README.rst').read(),
open('CHANGELOG.rst').read(),
]),
keywords="django, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
]
)
<commit_msg>Fix issue with template rendering<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.repo_name }}',
version="0.0.0",
url='https://github.com/{{ cookiecutter.github_user }}/{{ cookiecutter.app_name }}',
author="{{ cookiecutter.author_name }}",
author_email="{{ cookiecutter.email }}",
description="{{ cookiecutter.description }}",
long_description='\n\n'.join([
open('README.rst').read(),
open('CHANGELOG.rst').read(),
]),
keywords="django, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
]
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.repo_name }}',
version="0.0.0",
url='https://github.com/{{ cookiecutter.github_user }}/{{ cookiecutter.app_name',
author="{{ cookiecutter.author_name }}",
author_email="{{ cookiecutter.email }}",
description="{{ cookiecutter.description }}",
long_description='\n\n'.join([
open('README.rst').read(),
open('CHANGELOG.rst').read(),
]),
keywords="django, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
]
)
Fix issue with template rendering#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.repo_name }}',
version="0.0.0",
url='https://github.com/{{ cookiecutter.github_user }}/{{ cookiecutter.app_name }}',
author="{{ cookiecutter.author_name }}",
author_email="{{ cookiecutter.email }}",
description="{{ cookiecutter.description }}",
long_description='\n\n'.join([
open('README.rst').read(),
open('CHANGELOG.rst').read(),
]),
keywords="django, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.repo_name }}',
version="0.0.0",
url='https://github.com/{{ cookiecutter.github_user }}/{{ cookiecutter.app_name',
author="{{ cookiecutter.author_name }}",
author_email="{{ cookiecutter.email }}",
description="{{ cookiecutter.description }}",
long_description='\n\n'.join([
open('README.rst').read(),
open('CHANGELOG.rst').read(),
]),
keywords="django, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
]
)
<commit_msg>Fix issue with template rendering<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='{{ cookiecutter.repo_name }}',
version="0.0.0",
url='https://github.com/{{ cookiecutter.github_user }}/{{ cookiecutter.app_name }}',
author="{{ cookiecutter.author_name }}",
author_email="{{ cookiecutter.email }}",
description="{{ cookiecutter.description }}",
long_description='\n\n'.join([
open('README.rst').read(),
open('CHANGELOG.rst').read(),
]),
keywords="django, cms, pages, flatpages",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'Django',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
]
)
|
1947acc37d4f45a6b1969edfe3f7f10065e647b2
|
src/layeredconfig/strategy.py
|
src/layeredconfig/strategy.py
|
# -*- coding: utf-8 -*-
def add(new, previous=None):
if previous is None:
return new
return previous + new
def collect(new, previous=None):
if previous is None:
return [new]
return previous + [new]
def merge(new, previous=None):
return add(new, previous)
|
# -*- coding: utf-8 -*-
def add(next_, previous=None):
if previous is None:
return next_
return previous + next_
def collect(next_, previous=None):
if previous is None:
return [next_]
return previous + [next_]
def merge(next_, previous=None):
return add(next_, previous)
|
Rename misleading new to next
|
Rename misleading new to next
|
Python
|
bsd-3-clause
|
hakkeroid/lcconcept
|
# -*- coding: utf-8 -*-
def add(new, previous=None):
if previous is None:
return new
return previous + new
def collect(new, previous=None):
if previous is None:
return [new]
return previous + [new]
def merge(new, previous=None):
return add(new, previous)
Rename misleading new to next
|
# -*- coding: utf-8 -*-
def add(next_, previous=None):
if previous is None:
return next_
return previous + next_
def collect(next_, previous=None):
if previous is None:
return [next_]
return previous + [next_]
def merge(next_, previous=None):
return add(next_, previous)
|
<commit_before># -*- coding: utf-8 -*-
def add(new, previous=None):
if previous is None:
return new
return previous + new
def collect(new, previous=None):
if previous is None:
return [new]
return previous + [new]
def merge(new, previous=None):
return add(new, previous)
<commit_msg>Rename misleading new to next<commit_after>
|
# -*- coding: utf-8 -*-
def add(next_, previous=None):
if previous is None:
return next_
return previous + next_
def collect(next_, previous=None):
if previous is None:
return [next_]
return previous + [next_]
def merge(next_, previous=None):
return add(next_, previous)
|
# -*- coding: utf-8 -*-
def add(new, previous=None):
if previous is None:
return new
return previous + new
def collect(new, previous=None):
if previous is None:
return [new]
return previous + [new]
def merge(new, previous=None):
return add(new, previous)
Rename misleading new to next# -*- coding: utf-8 -*-
def add(next_, previous=None):
if previous is None:
return next_
return previous + next_
def collect(next_, previous=None):
if previous is None:
return [next_]
return previous + [next_]
def merge(next_, previous=None):
return add(next_, previous)
|
<commit_before># -*- coding: utf-8 -*-
def add(new, previous=None):
if previous is None:
return new
return previous + new
def collect(new, previous=None):
if previous is None:
return [new]
return previous + [new]
def merge(new, previous=None):
return add(new, previous)
<commit_msg>Rename misleading new to next<commit_after># -*- coding: utf-8 -*-
def add(next_, previous=None):
if previous is None:
return next_
return previous + next_
def collect(next_, previous=None):
if previous is None:
return [next_]
return previous + [next_]
def merge(next_, previous=None):
return add(next_, previous)
|
bba433a582a96f5acd59eedb3286e284d81f431d
|
src/nodeconductor_openstack/tests/test_backend.py
|
src/nodeconductor_openstack/tests/test_backend.py
|
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.session_recover_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
Stop patcher in tear down
|
Stop patcher in tear down
- itacloud-7198
|
Python
|
mit
|
opennode/nodeconductor-openstack
|
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
Stop patcher in tear down
- itacloud-7198
|
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.session_recover_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
<commit_before>import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
<commit_msg>Stop patcher in tear down
- itacloud-7198<commit_after>
|
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.session_recover_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
Stop patcher in tear down
- itacloud-7198import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.session_recover_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
<commit_before>import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
<commit_msg>Stop patcher in tear down
- itacloud-7198<commit_after>import mock
from rest_framework import test
class MockedSession(mock.MagicMock):
auth_ref = 'AUTH_REF'
class BaseBackendTestCase(test.APITransactionTestCase):
def setUp(self):
self.session_patcher = mock.patch('keystoneauth1.session.Session', MockedSession)
self.session_patcher.start()
self.session_recover_patcher = mock.patch('nodeconductor_openstack.backend.OpenStackSession.recover')
self.session_recover_patcher.start()
self.keystone_patcher = mock.patch('keystoneclient.v2_0.client.Client')
self.mocked_keystone = self.keystone_patcher.start()
self.nova_patcher = mock.patch('novaclient.v2.client.Client')
self.mocked_nova = self.nova_patcher.start()
self.cinder_patcher = mock.patch('cinderclient.v1.client.Client')
self.mocked_cinder = self.cinder_patcher.start()
def tearDown(self):
super(BaseBackendTestCase, self).tearDown()
self.session_patcher.stop()
self.session_recover_patcher.stop()
self.keystone_patcher.stop()
self.nova_patcher.stop()
self.cinder_patcher.stop()
|
3b21631f8c0d3820359c1163e7e5340f153c3938
|
twitter_bot.py
|
twitter_bot.py
|
# Import json parsing library
import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Search for latest tweets about query term
print twitter.search.tweets(q='satan')
|
# Import json parsing library
import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Load query term from configuration file
query_term = parser.get('query_settings','query_term')
# Search for latest tweets about query term
# Tweets has components 'search_metadata' and 'statuses' - we want the latter
tweets = twitter.search.tweets(q=query_term)['statuses']
# Extract tweetID, username and text of tweets returned from search
for tweet in tweets:
print tweet['id_str']
print tweet['user']['screen_name']
print tweet['text']
|
Add initial tweet extraction / load query term from config file
|
Add initial tweet extraction / load query term from config file
|
Python
|
mit
|
benhoyle/social-media-bot
|
# Import json parsing library
import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Search for latest tweets about query term
print twitter.search.tweets(q='satan')Add initial tweet extraction / load query term from config file
|
# Import json parsing library
import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Load query term from configuration file
query_term = parser.get('query_settings','query_term')
# Search for latest tweets about query term
# Tweets has components 'search_metadata' and 'statuses' - we want the latter
tweets = twitter.search.tweets(q=query_term)['statuses']
# Extract tweetID, username and text of tweets returned from search
for tweet in tweets:
print tweet['id_str']
print tweet['user']['screen_name']
print tweet['text']
|
<commit_before># Import json parsing library
import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Search for latest tweets about query term
print twitter.search.tweets(q='satan')<commit_msg>Add initial tweet extraction / load query term from config file<commit_after>
|
# Import json parsing library
import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Load query term from configuration file
query_term = parser.get('query_settings','query_term')
# Search for latest tweets about query term
# Tweets has components 'search_metadata' and 'statuses' - we want the latter
tweets = twitter.search.tweets(q=query_term)['statuses']
# Extract tweetID, username and text of tweets returned from search
for tweet in tweets:
print tweet['id_str']
print tweet['user']['screen_name']
print tweet['text']
|
# Import json parsing library
import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Search for latest tweets about query term
print twitter.search.tweets(q='satan')Add initial tweet extraction / load query term from config file# Import json parsing library
import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Load query term from configuration file
query_term = parser.get('query_settings','query_term')
# Search for latest tweets about query term
# Tweets has components 'search_metadata' and 'statuses' - we want the latter
tweets = twitter.search.tweets(q=query_term)['statuses']
# Extract tweetID, username and text of tweets returned from search
for tweet in tweets:
print tweet['id_str']
print tweet['user']['screen_name']
print tweet['text']
|
<commit_before># Import json parsing library
import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Search for latest tweets about query term
print twitter.search.tweets(q='satan')<commit_msg>Add initial tweet extraction / load query term from config file<commit_after># Import json parsing library
import json
# Import the necessary methods from "twitter" library
from twitter import Twitter, OAuth, TwitterHTTPError, TwitterStream
# Import "ConfigParser" library to load settings ("configparser" in python 3)
from ConfigParser import SafeConfigParser
# Load API variables from settings.cfg file
parser = SafeConfigParser()
parser.read('settings.cfg')
settings_dict = dict(parser.items('twitter_settings'))
oauth = OAuth(settings_dict['access_token'], settings_dict['access_secret'], settings_dict['consumer_key'], settings_dict['consumer_secret'])
# Initiate the connection to Twitter REST API
twitter = Twitter(auth=oauth)
# Load query term from configuration file
query_term = parser.get('query_settings','query_term')
# Search for latest tweets about query term
# Tweets has components 'search_metadata' and 'statuses' - we want the latter
tweets = twitter.search.tweets(q=query_term)['statuses']
# Extract tweetID, username and text of tweets returned from search
for tweet in tweets:
print tweet['id_str']
print tweet['user']['screen_name']
print tweet['text']
|
efbab142afc824f9b2ba4968ffe102c20e0ad7c3
|
rpp/encoder.py
|
rpp/encoder.py
|
from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, UUID):
return '{%s}' % value
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError("%r is not RPP serializable" % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
|
from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, float):
return '%.14f' % value
elif isinstance(value, UUID):
return '{%s}' % str(value).upper()
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
if indent == 0:
raise ValueError('Indent should be present')
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError('%r is not RPP serializable' % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
|
Correct representation of floats and uuids
|
Correct representation of floats and uuids
|
Python
|
bsd-3-clause
|
Perlence/rpp
|
from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, UUID):
return '{%s}' % value
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError("%r is not RPP serializable" % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
Correct representation of floats and uuids
|
from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, float):
return '%.14f' % value
elif isinstance(value, UUID):
return '{%s}' % str(value).upper()
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
if indent == 0:
raise ValueError('Indent should be present')
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError('%r is not RPP serializable' % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
|
<commit_before>from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, UUID):
return '{%s}' % value
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError("%r is not RPP serializable" % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
<commit_msg>Correct representation of floats and uuids<commit_after>
|
from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, float):
return '%.14f' % value
elif isinstance(value, UUID):
return '{%s}' % str(value).upper()
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
if indent == 0:
raise ValueError('Indent should be present')
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError('%r is not RPP serializable' % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
|
from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, UUID):
return '{%s}' % value
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError("%r is not RPP serializable" % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
Correct representation of floats and uuidsfrom uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, float):
return '%.14f' % value
elif isinstance(value, UUID):
return '{%s}' % str(value).upper()
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
if indent == 0:
raise ValueError('Indent should be present')
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError('%r is not RPP serializable' % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
|
<commit_before>from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, UUID):
return '{%s}' % value
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError("%r is not RPP serializable" % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
<commit_msg>Correct representation of floats and uuids<commit_after>from uuid import UUID
from scanner import Symbol
def tostr(value):
if isinstance(value, Symbol):
return str(value)
elif isinstance(value, str):
return '"%s"' % value
elif isinstance(value, float):
return '%.14f' % value
elif isinstance(value, UUID):
return '{%s}' % str(value).upper()
elif value is None:
return '-'
else:
return str(value)
def encode(lists, indent=2, level=0):
if indent == 0:
raise ValueError('Indent should be present')
result = '<'
for i, item in enumerate(lists):
if not isinstance(item, list):
raise TypeError('%r is not RPP serializable' % item)
if i > 0:
result += ' ' * (level + 1) * indent
if all(not isinstance(x, list) for x in item):
name, values = item[0].upper(), item[1:]
strvalues = map(tostr, values)
result += ' '.join([name] + strvalues)
else:
result += encode(item, level=(level + 1))
result += '\n' if indent else ' '
result += (' ' * level * indent) + '>'
return result
|
219afd2350d2b3aecd6362a1bd7a68291d3021e1
|
tests/test_vendpageparser.py
|
tests/test_vendpageparser.py
|
import unittest
from vendcrawler.scripts.vendpageparser import VendPageParser
class TestVendPageParserMethods(unittest.TestCase):
def test_feed(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
vendpageparser = VendPageParser()
vendpageparser.feed(str(data))
self.assertEqual(vendpageparser.items[0]['id'], '2221')
self.assertEqual(vendpageparser.items[1]['name'], 'Buckler [1]')
self.assertEqual(vendpageparser.items[3]['amount'], '12')
self.assertEqual(vendpageparser.items[4]['vendor'], '186612')
if __name__ == '__main__':
unittest.main()
|
import unittest
from vendcrawler.scripts.vendpageparser import VendPageParser
class TestVendPageParserMethods(unittest.TestCase):
def test_feed(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
vendpageparser = VendPageParser()
vendpageparser.feed(str(data))
self.assertEqual(vendpageparser.items[0]['id'], '2221')
self.assertEqual(vendpageparser.items[1]['name'], 'Buckler [1]')
self.assertEqual(vendpageparser.items[3]['amount'], '12')
self.assertEqual(vendpageparser.items[4]['vendor_id'], '186612')
if __name__ == '__main__':
unittest.main()
|
Change vendor key to vendor_id.
|
Change vendor key to vendor_id.
|
Python
|
mit
|
josetaas/vendcrawler,josetaas/vendcrawler,josetaas/vendcrawler
|
import unittest
from vendcrawler.scripts.vendpageparser import VendPageParser
class TestVendPageParserMethods(unittest.TestCase):
def test_feed(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
vendpageparser = VendPageParser()
vendpageparser.feed(str(data))
self.assertEqual(vendpageparser.items[0]['id'], '2221')
self.assertEqual(vendpageparser.items[1]['name'], 'Buckler [1]')
self.assertEqual(vendpageparser.items[3]['amount'], '12')
self.assertEqual(vendpageparser.items[4]['vendor'], '186612')
if __name__ == '__main__':
unittest.main()
Change vendor key to vendor_id.
|
import unittest
from vendcrawler.scripts.vendpageparser import VendPageParser
class TestVendPageParserMethods(unittest.TestCase):
def test_feed(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
vendpageparser = VendPageParser()
vendpageparser.feed(str(data))
self.assertEqual(vendpageparser.items[0]['id'], '2221')
self.assertEqual(vendpageparser.items[1]['name'], 'Buckler [1]')
self.assertEqual(vendpageparser.items[3]['amount'], '12')
self.assertEqual(vendpageparser.items[4]['vendor_id'], '186612')
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from vendcrawler.scripts.vendpageparser import VendPageParser
class TestVendPageParserMethods(unittest.TestCase):
def test_feed(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
vendpageparser = VendPageParser()
vendpageparser.feed(str(data))
self.assertEqual(vendpageparser.items[0]['id'], '2221')
self.assertEqual(vendpageparser.items[1]['name'], 'Buckler [1]')
self.assertEqual(vendpageparser.items[3]['amount'], '12')
self.assertEqual(vendpageparser.items[4]['vendor'], '186612')
if __name__ == '__main__':
unittest.main()
<commit_msg>Change vendor key to vendor_id.<commit_after>
|
import unittest
from vendcrawler.scripts.vendpageparser import VendPageParser
class TestVendPageParserMethods(unittest.TestCase):
def test_feed(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
vendpageparser = VendPageParser()
vendpageparser.feed(str(data))
self.assertEqual(vendpageparser.items[0]['id'], '2221')
self.assertEqual(vendpageparser.items[1]['name'], 'Buckler [1]')
self.assertEqual(vendpageparser.items[3]['amount'], '12')
self.assertEqual(vendpageparser.items[4]['vendor_id'], '186612')
if __name__ == '__main__':
unittest.main()
|
import unittest
from vendcrawler.scripts.vendpageparser import VendPageParser
class TestVendPageParserMethods(unittest.TestCase):
def test_feed(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
vendpageparser = VendPageParser()
vendpageparser.feed(str(data))
self.assertEqual(vendpageparser.items[0]['id'], '2221')
self.assertEqual(vendpageparser.items[1]['name'], 'Buckler [1]')
self.assertEqual(vendpageparser.items[3]['amount'], '12')
self.assertEqual(vendpageparser.items[4]['vendor'], '186612')
if __name__ == '__main__':
unittest.main()
Change vendor key to vendor_id.import unittest
from vendcrawler.scripts.vendpageparser import VendPageParser
class TestVendPageParserMethods(unittest.TestCase):
def test_feed(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
vendpageparser = VendPageParser()
vendpageparser.feed(str(data))
self.assertEqual(vendpageparser.items[0]['id'], '2221')
self.assertEqual(vendpageparser.items[1]['name'], 'Buckler [1]')
self.assertEqual(vendpageparser.items[3]['amount'], '12')
self.assertEqual(vendpageparser.items[4]['vendor_id'], '186612')
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from vendcrawler.scripts.vendpageparser import VendPageParser
class TestVendPageParserMethods(unittest.TestCase):
def test_feed(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
vendpageparser = VendPageParser()
vendpageparser.feed(str(data))
self.assertEqual(vendpageparser.items[0]['id'], '2221')
self.assertEqual(vendpageparser.items[1]['name'], 'Buckler [1]')
self.assertEqual(vendpageparser.items[3]['amount'], '12')
self.assertEqual(vendpageparser.items[4]['vendor'], '186612')
if __name__ == '__main__':
unittest.main()
<commit_msg>Change vendor key to vendor_id.<commit_after>import unittest
from vendcrawler.scripts.vendpageparser import VendPageParser
class TestVendPageParserMethods(unittest.TestCase):
def test_feed(self):
with open('test_vendcrawler.html', 'r') as f:
data = f.read()
vendpageparser = VendPageParser()
vendpageparser.feed(str(data))
self.assertEqual(vendpageparser.items[0]['id'], '2221')
self.assertEqual(vendpageparser.items[1]['name'], 'Buckler [1]')
self.assertEqual(vendpageparser.items[3]['amount'], '12')
self.assertEqual(vendpageparser.items[4]['vendor_id'], '186612')
if __name__ == '__main__':
unittest.main()
|
fad97c21e2643e5df9759ebf260881b26e918d7c
|
api/api/views/hacker/get/csv/resume_links.py
|
api/api/views/hacker/get/csv/resume_links.py
|
"""
Generates a CSV containing approved hackers' resumes
"""
from hackfsu_com.views.generic import StreamedCsvView
from hackfsu_com.util import acl, files
from django.conf import settings
from api.models import Hackathon, HackerInfo
class ResumeLinksCsv(StreamedCsvView):
access_manager = acl.AccessManager(acl_accept=[acl.group_organizer])
file_name = 'HackFSU Approved Hackers\' Submitted Resumes.csv'
@staticmethod
def row_generator(request):
h = Hackathon.objects.current()
yield ['Approved Hackers\' Submitted Resumes']
yield [
'First Name',
'Last Name',
'Email',
'School',
'Attended',
'Resume File Name',
'Resume URL'
]
for hacker in HackerInfo.objects.filter(
hackathon=h,
approved=True
):
row = [
hacker.user.first_name,
hacker.user.last_name,
hacker.user.email,
str(hacker.school),
hacker.attendee_status.checked_in_at is not None
]
if len(hacker.resume_file_name) > 0:
row.extend([
hacker.resume_file_name.split('/')[-1],
settings.URL_BASE + files.get_url(hacker.resume_file_name)
])
yield row
|
"""
Generates a CSV containing approved hackers' resumes
"""
from hackfsu_com.views.generic import StreamedCsvView
from hackfsu_com.util import acl, files
from django.conf import settings
from api.models import Hackathon, HackerInfo, UserInfo
class ResumeLinksCsv(StreamedCsvView):
access_manager = acl.AccessManager(acl_accept=[acl.group_organizer])
file_name = 'HackFSU Approved Hackers\' Submitted Resumes.csv'
@staticmethod
def row_generator(request):
h = Hackathon.objects.current()
yield ['Approved Hackers\' Submitted Resumes']
yield [
'First Name',
'Last Name',
'Email',
'School',
'Github',
'LinkedIn',
'Attended',
'Resume File Name',
'Resume URL'
]
for hacker in HackerInfo.objects.filter(
hackathon=h,
approved=True
):
user_info = UserInfo.objects.get(user=hacker.user)
row = [
hacker.user.first_name,
hacker.user.last_name,
hacker.user.email,
str(hacker.school),
user_info.github,
user_info.linkedin,
hacker.attendee_status.checked_in_at is not None
]
if len(hacker.resume_file_name) > 0:
row.extend([
hacker.resume_file_name.split('/')[-1],
settings.URL_BASE + files.get_url(hacker.resume_file_name)
])
yield row
|
Add Github+LinkedIn to Hacker Data export
|
Add Github+LinkedIn to Hacker Data export
|
Python
|
apache-2.0
|
andrewsosa/hackfsu_com,andrewsosa/hackfsu_com,andrewsosa/hackfsu_com,andrewsosa/hackfsu_com
|
"""
Generates a CSV containing approved hackers' resumes
"""
from hackfsu_com.views.generic import StreamedCsvView
from hackfsu_com.util import acl, files
from django.conf import settings
from api.models import Hackathon, HackerInfo
class ResumeLinksCsv(StreamedCsvView):
access_manager = acl.AccessManager(acl_accept=[acl.group_organizer])
file_name = 'HackFSU Approved Hackers\' Submitted Resumes.csv'
@staticmethod
def row_generator(request):
h = Hackathon.objects.current()
yield ['Approved Hackers\' Submitted Resumes']
yield [
'First Name',
'Last Name',
'Email',
'School',
'Attended',
'Resume File Name',
'Resume URL'
]
for hacker in HackerInfo.objects.filter(
hackathon=h,
approved=True
):
row = [
hacker.user.first_name,
hacker.user.last_name,
hacker.user.email,
str(hacker.school),
hacker.attendee_status.checked_in_at is not None
]
if len(hacker.resume_file_name) > 0:
row.extend([
hacker.resume_file_name.split('/')[-1],
settings.URL_BASE + files.get_url(hacker.resume_file_name)
])
yield row
Add Github+LinkedIn to Hacker Data export
|
"""
Generates a CSV containing approved hackers' resumes
"""
from hackfsu_com.views.generic import StreamedCsvView
from hackfsu_com.util import acl, files
from django.conf import settings
from api.models import Hackathon, HackerInfo, UserInfo
class ResumeLinksCsv(StreamedCsvView):
access_manager = acl.AccessManager(acl_accept=[acl.group_organizer])
file_name = 'HackFSU Approved Hackers\' Submitted Resumes.csv'
@staticmethod
def row_generator(request):
h = Hackathon.objects.current()
yield ['Approved Hackers\' Submitted Resumes']
yield [
'First Name',
'Last Name',
'Email',
'School',
'Github',
'LinkedIn',
'Attended',
'Resume File Name',
'Resume URL'
]
for hacker in HackerInfo.objects.filter(
hackathon=h,
approved=True
):
user_info = UserInfo.objects.get(user=hacker.user)
row = [
hacker.user.first_name,
hacker.user.last_name,
hacker.user.email,
str(hacker.school),
user_info.github,
user_info.linkedin,
hacker.attendee_status.checked_in_at is not None
]
if len(hacker.resume_file_name) > 0:
row.extend([
hacker.resume_file_name.split('/')[-1],
settings.URL_BASE + files.get_url(hacker.resume_file_name)
])
yield row
|
<commit_before>"""
Generates a CSV containing approved hackers' resumes
"""
from hackfsu_com.views.generic import StreamedCsvView
from hackfsu_com.util import acl, files
from django.conf import settings
from api.models import Hackathon, HackerInfo
class ResumeLinksCsv(StreamedCsvView):
access_manager = acl.AccessManager(acl_accept=[acl.group_organizer])
file_name = 'HackFSU Approved Hackers\' Submitted Resumes.csv'
@staticmethod
def row_generator(request):
h = Hackathon.objects.current()
yield ['Approved Hackers\' Submitted Resumes']
yield [
'First Name',
'Last Name',
'Email',
'School',
'Attended',
'Resume File Name',
'Resume URL'
]
for hacker in HackerInfo.objects.filter(
hackathon=h,
approved=True
):
row = [
hacker.user.first_name,
hacker.user.last_name,
hacker.user.email,
str(hacker.school),
hacker.attendee_status.checked_in_at is not None
]
if len(hacker.resume_file_name) > 0:
row.extend([
hacker.resume_file_name.split('/')[-1],
settings.URL_BASE + files.get_url(hacker.resume_file_name)
])
yield row
<commit_msg>Add Github+LinkedIn to Hacker Data export<commit_after>
|
"""
Generates a CSV containing approved hackers' resumes
"""
from hackfsu_com.views.generic import StreamedCsvView
from hackfsu_com.util import acl, files
from django.conf import settings
from api.models import Hackathon, HackerInfo, UserInfo
class ResumeLinksCsv(StreamedCsvView):
access_manager = acl.AccessManager(acl_accept=[acl.group_organizer])
file_name = 'HackFSU Approved Hackers\' Submitted Resumes.csv'
@staticmethod
def row_generator(request):
h = Hackathon.objects.current()
yield ['Approved Hackers\' Submitted Resumes']
yield [
'First Name',
'Last Name',
'Email',
'School',
'Github',
'LinkedIn',
'Attended',
'Resume File Name',
'Resume URL'
]
for hacker in HackerInfo.objects.filter(
hackathon=h,
approved=True
):
user_info = UserInfo.objects.get(user=hacker.user)
row = [
hacker.user.first_name,
hacker.user.last_name,
hacker.user.email,
str(hacker.school),
user_info.github,
user_info.linkedin,
hacker.attendee_status.checked_in_at is not None
]
if len(hacker.resume_file_name) > 0:
row.extend([
hacker.resume_file_name.split('/')[-1],
settings.URL_BASE + files.get_url(hacker.resume_file_name)
])
yield row
|
"""
Generates a CSV containing approved hackers' resumes
"""
from hackfsu_com.views.generic import StreamedCsvView
from hackfsu_com.util import acl, files
from django.conf import settings
from api.models import Hackathon, HackerInfo
class ResumeLinksCsv(StreamedCsvView):
access_manager = acl.AccessManager(acl_accept=[acl.group_organizer])
file_name = 'HackFSU Approved Hackers\' Submitted Resumes.csv'
@staticmethod
def row_generator(request):
h = Hackathon.objects.current()
yield ['Approved Hackers\' Submitted Resumes']
yield [
'First Name',
'Last Name',
'Email',
'School',
'Attended',
'Resume File Name',
'Resume URL'
]
for hacker in HackerInfo.objects.filter(
hackathon=h,
approved=True
):
row = [
hacker.user.first_name,
hacker.user.last_name,
hacker.user.email,
str(hacker.school),
hacker.attendee_status.checked_in_at is not None
]
if len(hacker.resume_file_name) > 0:
row.extend([
hacker.resume_file_name.split('/')[-1],
settings.URL_BASE + files.get_url(hacker.resume_file_name)
])
yield row
Add Github+LinkedIn to Hacker Data export"""
Generates a CSV containing approved hackers' resumes
"""
from hackfsu_com.views.generic import StreamedCsvView
from hackfsu_com.util import acl, files
from django.conf import settings
from api.models import Hackathon, HackerInfo, UserInfo
class ResumeLinksCsv(StreamedCsvView):
access_manager = acl.AccessManager(acl_accept=[acl.group_organizer])
file_name = 'HackFSU Approved Hackers\' Submitted Resumes.csv'
@staticmethod
def row_generator(request):
h = Hackathon.objects.current()
yield ['Approved Hackers\' Submitted Resumes']
yield [
'First Name',
'Last Name',
'Email',
'School',
'Github',
'LinkedIn',
'Attended',
'Resume File Name',
'Resume URL'
]
for hacker in HackerInfo.objects.filter(
hackathon=h,
approved=True
):
user_info = UserInfo.objects.get(user=hacker.user)
row = [
hacker.user.first_name,
hacker.user.last_name,
hacker.user.email,
str(hacker.school),
user_info.github,
user_info.linkedin,
hacker.attendee_status.checked_in_at is not None
]
if len(hacker.resume_file_name) > 0:
row.extend([
hacker.resume_file_name.split('/')[-1],
settings.URL_BASE + files.get_url(hacker.resume_file_name)
])
yield row
|
<commit_before>"""
Generates a CSV containing approved hackers' resumes
"""
from hackfsu_com.views.generic import StreamedCsvView
from hackfsu_com.util import acl, files
from django.conf import settings
from api.models import Hackathon, HackerInfo
class ResumeLinksCsv(StreamedCsvView):
access_manager = acl.AccessManager(acl_accept=[acl.group_organizer])
file_name = 'HackFSU Approved Hackers\' Submitted Resumes.csv'
@staticmethod
def row_generator(request):
h = Hackathon.objects.current()
yield ['Approved Hackers\' Submitted Resumes']
yield [
'First Name',
'Last Name',
'Email',
'School',
'Attended',
'Resume File Name',
'Resume URL'
]
for hacker in HackerInfo.objects.filter(
hackathon=h,
approved=True
):
row = [
hacker.user.first_name,
hacker.user.last_name,
hacker.user.email,
str(hacker.school),
hacker.attendee_status.checked_in_at is not None
]
if len(hacker.resume_file_name) > 0:
row.extend([
hacker.resume_file_name.split('/')[-1],
settings.URL_BASE + files.get_url(hacker.resume_file_name)
])
yield row
<commit_msg>Add Github+LinkedIn to Hacker Data export<commit_after>"""
Generates a CSV containing approved hackers' resumes
"""
from hackfsu_com.views.generic import StreamedCsvView
from hackfsu_com.util import acl, files
from django.conf import settings
from api.models import Hackathon, HackerInfo, UserInfo
class ResumeLinksCsv(StreamedCsvView):
access_manager = acl.AccessManager(acl_accept=[acl.group_organizer])
file_name = 'HackFSU Approved Hackers\' Submitted Resumes.csv'
@staticmethod
def row_generator(request):
h = Hackathon.objects.current()
yield ['Approved Hackers\' Submitted Resumes']
yield [
'First Name',
'Last Name',
'Email',
'School',
'Github',
'LinkedIn',
'Attended',
'Resume File Name',
'Resume URL'
]
for hacker in HackerInfo.objects.filter(
hackathon=h,
approved=True
):
user_info = UserInfo.objects.get(user=hacker.user)
row = [
hacker.user.first_name,
hacker.user.last_name,
hacker.user.email,
str(hacker.school),
user_info.github,
user_info.linkedin,
hacker.attendee_status.checked_in_at is not None
]
if len(hacker.resume_file_name) > 0:
row.extend([
hacker.resume_file_name.split('/')[-1],
settings.URL_BASE + files.get_url(hacker.resume_file_name)
])
yield row
|
c23553f48652ed3ed65e473c79732dddc6c5341b
|
sample_code.py
|
sample_code.py
|
@commands.command
async def my_cmd():
await client.say('hi')
|
import discord
import asyncio
client = discord.Client()
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run('token')
|
Set sample code to discord.py basic example
|
Set sample code to discord.py basic example
|
Python
|
mit
|
TheTrain2000/async2rewrite
|
@commands.command
async def my_cmd():
await client.say('hi')Set sample code to discord.py basic example
|
import discord
import asyncio
client = discord.Client()
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run('token')
|
<commit_before>@commands.command
async def my_cmd():
await client.say('hi')<commit_msg>Set sample code to discord.py basic example<commit_after>
|
import discord
import asyncio
client = discord.Client()
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run('token')
|
@commands.command
async def my_cmd():
await client.say('hi')Set sample code to discord.py basic exampleimport discord
import asyncio
client = discord.Client()
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run('token')
|
<commit_before>@commands.command
async def my_cmd():
await client.say('hi')<commit_msg>Set sample code to discord.py basic example<commit_after>import discord
import asyncio
client = discord.Client()
@client.event
async def on_ready():
print('Logged in as')
print(client.user.name)
print(client.user.id)
print('------')
@client.event
async def on_message(message):
if message.content.startswith('!test'):
counter = 0
tmp = await client.send_message(message.channel, 'Calculating messages...')
async for log in client.logs_from(message.channel, limit=100):
if log.author == message.author:
counter += 1
await client.edit_message(tmp, 'You have {} messages.'.format(counter))
elif message.content.startswith('!sleep'):
await asyncio.sleep(5)
await client.send_message(message.channel, 'Done sleeping')
client.run('token')
|
d63302f10bf9972680c189a25f995b713e72562f
|
demo/apps/catalogue/models.py
|
demo/apps/catalogue/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('name', classname='full'),
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
from oscar.apps.catalogue.models import * # noqa
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
def save(self, *args, **kwargs):
self.name = self.title
super(Category, self).save(*args, **kwargs)
from oscar.apps.catalogue.models import * # noqa
|
Set name field on save
|
Set name field on save
|
Python
|
mit
|
pgovers/oscar-wagtail-demo,pgovers/oscar-wagtail-demo
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('name', classname='full'),
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
from oscar.apps.catalogue.models import * # noqa
Set name field on save
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
def save(self, *args, **kwargs):
self.name = self.title
super(Category, self).save(*args, **kwargs)
from oscar.apps.catalogue.models import * # noqa
|
<commit_before>from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('name', classname='full'),
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
from oscar.apps.catalogue.models import * # noqa
<commit_msg>Set name field on save<commit_after>
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
def save(self, *args, **kwargs):
self.name = self.title
super(Category, self).save(*args, **kwargs)
from oscar.apps.catalogue.models import * # noqa
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('name', classname='full'),
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
from oscar.apps.catalogue.models import * # noqa
Set name field on savefrom django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
def save(self, *args, **kwargs):
self.name = self.title
super(Category, self).save(*args, **kwargs)
from oscar.apps.catalogue.models import * # noqa
|
<commit_before>from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('name', classname='full'),
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
from oscar.apps.catalogue.models import * # noqa
<commit_msg>Set name field on save<commit_after>from django.db import models
from django.utils.translation import ugettext_lazy as _
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
class Category(Page):
"""
The Oscars Category as a Wagtail Page
This works because they both use Treebeard
"""
name = models.CharField(_('Name'), max_length=255, db_index=True)
description = models.TextField(_('Description'), blank=True)
image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
content_panels = Page.content_panels + [
FieldPanel('description', classname='full'),
ImageChooserPanel('image')
]
def save(self, *args, **kwargs):
self.name = self.title
super(Category, self).save(*args, **kwargs)
from oscar.apps.catalogue.models import * # noqa
|
6f1ece1571c0dafd386b27d8692433bf8b2180e7
|
test/flask_ssl_skeleton_test.py
|
test/flask_ssl_skeleton_test.py
|
import unittest
import flask_ssl_skeleton
class SkeletonTestCase(unittest.TestCase):
def setUp(self):
self.app = flask_ssl_skeleton.app
self.app.secret_key = "Unit secret"
self.client = self.app.test_client()
def test_root_returns_form_when_not_logged_on(self):
rv = self.client.get('/')
self.assertTrue('<form method="POST">' in rv.data)
def test_admin_redirects_to_login_when_not_logged_in(self):
rv = self.client.get('/admin')
self.assertEqual(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/'))
def test_root_redirects_when_logged_in(self):
rv = self.client.post('/', data=dict(username='admin', password='password'))
self.assertEquals(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/admin'))
|
import unittest
import flask_ssl_skeleton
class SkeletonTestCase(unittest.TestCase):
def setUp(self):
self.app = flask_ssl_skeleton.app
self.app.secret_key = "Unit secret"
self.client = self.app.test_client()
def test_root_returns_form_when_not_logged_on(self):
rv = self.client.get('/')
self.assertTrue('<form method="POST">' in rv.data)
def test_admin_redirects_to_login_when_not_logged_in(self):
rv = self.client.get('/admin')
self.assertEqual(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/'))
def test_root_redirects_when_logged_in(self):
rv = self.client.post('/', data=dict(username='admin', password='password'))
self.assertEquals(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/admin'))
|
Fix unit test for redirecting unauthed user to index
|
Fix unit test for redirecting unauthed user to index
|
Python
|
apache-2.0
|
krujos/flask_ssl_skeleton
|
import unittest
import flask_ssl_skeleton
class SkeletonTestCase(unittest.TestCase):
def setUp(self):
self.app = flask_ssl_skeleton.app
self.app.secret_key = "Unit secret"
self.client = self.app.test_client()
def test_root_returns_form_when_not_logged_on(self):
rv = self.client.get('/')
self.assertTrue('<form method="POST">' in rv.data)
def test_admin_redirects_to_login_when_not_logged_in(self):
rv = self.client.get('/admin')
self.assertEqual(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/'))
def test_root_redirects_when_logged_in(self):
rv = self.client.post('/', data=dict(username='admin', password='password'))
self.assertEquals(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/admin'))
Fix unit test for redirecting unauthed user to index
|
import unittest
import flask_ssl_skeleton
class SkeletonTestCase(unittest.TestCase):
def setUp(self):
self.app = flask_ssl_skeleton.app
self.app.secret_key = "Unit secret"
self.client = self.app.test_client()
def test_root_returns_form_when_not_logged_on(self):
rv = self.client.get('/')
self.assertTrue('<form method="POST">' in rv.data)
def test_admin_redirects_to_login_when_not_logged_in(self):
rv = self.client.get('/admin')
self.assertEqual(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/'))
def test_root_redirects_when_logged_in(self):
rv = self.client.post('/', data=dict(username='admin', password='password'))
self.assertEquals(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/admin'))
|
<commit_before>import unittest
import flask_ssl_skeleton
class SkeletonTestCase(unittest.TestCase):
def setUp(self):
self.app = flask_ssl_skeleton.app
self.app.secret_key = "Unit secret"
self.client = self.app.test_client()
def test_root_returns_form_when_not_logged_on(self):
rv = self.client.get('/')
self.assertTrue('<form method="POST">' in rv.data)
def test_admin_redirects_to_login_when_not_logged_in(self):
rv = self.client.get('/admin')
self.assertEqual(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/'))
def test_root_redirects_when_logged_in(self):
rv = self.client.post('/', data=dict(username='admin', password='password'))
self.assertEquals(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/admin'))
<commit_msg>Fix unit test for redirecting unauthed user to index<commit_after>
|
import unittest
import flask_ssl_skeleton
class SkeletonTestCase(unittest.TestCase):
def setUp(self):
self.app = flask_ssl_skeleton.app
self.app.secret_key = "Unit secret"
self.client = self.app.test_client()
def test_root_returns_form_when_not_logged_on(self):
rv = self.client.get('/')
self.assertTrue('<form method="POST">' in rv.data)
def test_admin_redirects_to_login_when_not_logged_in(self):
rv = self.client.get('/admin')
self.assertEqual(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/'))
def test_root_redirects_when_logged_in(self):
rv = self.client.post('/', data=dict(username='admin', password='password'))
self.assertEquals(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/admin'))
|
import unittest
import flask_ssl_skeleton
class SkeletonTestCase(unittest.TestCase):
def setUp(self):
self.app = flask_ssl_skeleton.app
self.app.secret_key = "Unit secret"
self.client = self.app.test_client()
def test_root_returns_form_when_not_logged_on(self):
rv = self.client.get('/')
self.assertTrue('<form method="POST">' in rv.data)
def test_admin_redirects_to_login_when_not_logged_in(self):
rv = self.client.get('/admin')
self.assertEqual(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/'))
def test_root_redirects_when_logged_in(self):
rv = self.client.post('/', data=dict(username='admin', password='password'))
self.assertEquals(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/admin'))
Fix unit test for redirecting unauthed user to indeximport unittest
import flask_ssl_skeleton
class SkeletonTestCase(unittest.TestCase):
def setUp(self):
self.app = flask_ssl_skeleton.app
self.app.secret_key = "Unit secret"
self.client = self.app.test_client()
def test_root_returns_form_when_not_logged_on(self):
rv = self.client.get('/')
self.assertTrue('<form method="POST">' in rv.data)
def test_admin_redirects_to_login_when_not_logged_in(self):
rv = self.client.get('/admin')
self.assertEqual(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/'))
def test_root_redirects_when_logged_in(self):
rv = self.client.post('/', data=dict(username='admin', password='password'))
self.assertEquals(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/admin'))
|
<commit_before>import unittest
import flask_ssl_skeleton
class SkeletonTestCase(unittest.TestCase):
def setUp(self):
self.app = flask_ssl_skeleton.app
self.app.secret_key = "Unit secret"
self.client = self.app.test_client()
def test_root_returns_form_when_not_logged_on(self):
rv = self.client.get('/')
self.assertTrue('<form method="POST">' in rv.data)
def test_admin_redirects_to_login_when_not_logged_in(self):
rv = self.client.get('/admin')
self.assertEqual(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/'))
def test_root_redirects_when_logged_in(self):
rv = self.client.post('/', data=dict(username='admin', password='password'))
self.assertEquals(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/admin'))
<commit_msg>Fix unit test for redirecting unauthed user to index<commit_after>import unittest
import flask_ssl_skeleton
class SkeletonTestCase(unittest.TestCase):
def setUp(self):
self.app = flask_ssl_skeleton.app
self.app.secret_key = "Unit secret"
self.client = self.app.test_client()
def test_root_returns_form_when_not_logged_on(self):
rv = self.client.get('/')
self.assertTrue('<form method="POST">' in rv.data)
def test_admin_redirects_to_login_when_not_logged_in(self):
rv = self.client.get('/admin')
self.assertEqual(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/'))
def test_root_redirects_when_logged_in(self):
rv = self.client.post('/', data=dict(username='admin', password='password'))
self.assertEquals(302, rv.status_code)
self.assertTrue(rv.headers['Location'].endswith('/admin'))
|
50b6778ae43b8945b2073630e351ab759b007a3e
|
tests/social/youtube/test_tasks.py
|
tests/social/youtube/test_tasks.py
|
# -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='revyver')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='revyver')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
|
# -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='iceymoon')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='iceymoon')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
|
Switch to Jen's channel to (hopefully) make these tests faster.
|
Switch to Jen's channel to (hopefully) make these tests faster.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
# -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='revyver')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='revyver')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
Switch to Jen's channel to (hopefully) make these tests faster.
|
# -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='iceymoon')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='iceymoon')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
|
<commit_before># -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='revyver')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='revyver')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
<commit_msg>Switch to Jen's channel to (hopefully) make these tests faster.<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='iceymoon')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='iceymoon')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
|
# -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='revyver')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='revyver')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
Switch to Jen's channel to (hopefully) make these tests faster.# -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='iceymoon')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='iceymoon')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
|
<commit_before># -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='revyver')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='revyver')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
<commit_msg>Switch to Jen's channel to (hopefully) make these tests faster.<commit_after># -*- coding: utf-8 -*-
import pytest
from components.social.youtube.factories import ChannelFactory
from components.social.youtube.models import Video
from components.social.youtube.tasks import (fetch_all_videos,
fetch_latest_videos)
pytestmark = pytest.mark.django_db
def test_fetch_all_videos():
channel = ChannelFactory(username='iceymoon')
fetch_all_videos(channel)
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
def test_fetch_latest_videos():
channel = ChannelFactory(username='iceymoon')
fetch_latest_videos()
assert channel.videos.count() > 0
for video in channel.videos.all():
assert isinstance(video, Video)
|
6666351757c2c2083a88158a132f446112109b9d
|
tests/test_redshift/test_server.py
|
tests/test_redshift/test_server.py
|
from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
json_data = json.loads(res.data.decode("utf-8"))
clusters = json_data['DescribeClustersResponse'][
'DescribeClustersResult']['Clusters']
list(clusters).should.equal([])
|
from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
result = res.data.decode("utf-8")
result.should.contain("<DescribeClustersResponse><DescribeClustersResult><Clusters></Clusters></DescribeClustersResult")
|
Fix redshift server to default to xml.
|
Fix redshift server to default to xml.
|
Python
|
apache-2.0
|
heddle317/moto,kefo/moto,botify-labs/moto,kefo/moto,Affirm/moto,ZuluPro/moto,Affirm/moto,Brett55/moto,2rs2ts/moto,dbfr3qs/moto,dbfr3qs/moto,okomestudio/moto,heddle317/moto,heddle317/moto,ZuluPro/moto,okomestudio/moto,botify-labs/moto,whummer/moto,william-richard/moto,gjtempleton/moto,dbfr3qs/moto,heddle317/moto,rocky4570/moto,gjtempleton/moto,gjtempleton/moto,okomestudio/moto,2rs2ts/moto,whummer/moto,spulec/moto,ZuluPro/moto,rocky4570/moto,rocky4570/moto,rocky4570/moto,kefo/moto,william-richard/moto,botify-labs/moto,kefo/moto,william-richard/moto,okomestudio/moto,Brett55/moto,Brett55/moto,ZuluPro/moto,gjtempleton/moto,william-richard/moto,botify-labs/moto,2rs2ts/moto,Brett55/moto,spulec/moto,okomestudio/moto,whummer/moto,dbfr3qs/moto,Brett55/moto,botify-labs/moto,rocky4570/moto,2rs2ts/moto,Affirm/moto,ZuluPro/moto,rocky4570/moto,whummer/moto,heddle317/moto,ZuluPro/moto,Affirm/moto,kefo/moto,spulec/moto,Affirm/moto,spulec/moto,william-richard/moto,botify-labs/moto,dbfr3qs/moto,william-richard/moto,dbfr3qs/moto,2rs2ts/moto,Brett55/moto,spulec/moto,Affirm/moto,gjtempleton/moto,spulec/moto,okomestudio/moto,whummer/moto,whummer/moto
|
from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
json_data = json.loads(res.data.decode("utf-8"))
clusters = json_data['DescribeClustersResponse'][
'DescribeClustersResult']['Clusters']
list(clusters).should.equal([])
Fix redshift server to default to xml.
|
from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
result = res.data.decode("utf-8")
result.should.contain("<DescribeClustersResponse><DescribeClustersResult><Clusters></Clusters></DescribeClustersResult")
|
<commit_before>from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
json_data = json.loads(res.data.decode("utf-8"))
clusters = json_data['DescribeClustersResponse'][
'DescribeClustersResult']['Clusters']
list(clusters).should.equal([])
<commit_msg>Fix redshift server to default to xml.<commit_after>
|
from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
result = res.data.decode("utf-8")
result.should.contain("<DescribeClustersResponse><DescribeClustersResult><Clusters></Clusters></DescribeClustersResult")
|
from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
json_data = json.loads(res.data.decode("utf-8"))
clusters = json_data['DescribeClustersResponse'][
'DescribeClustersResult']['Clusters']
list(clusters).should.equal([])
Fix redshift server to default to xml.from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
result = res.data.decode("utf-8")
result.should.contain("<DescribeClustersResponse><DescribeClustersResult><Clusters></Clusters></DescribeClustersResult")
|
<commit_before>from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
json_data = json.loads(res.data.decode("utf-8"))
clusters = json_data['DescribeClustersResponse'][
'DescribeClustersResult']['Clusters']
list(clusters).should.equal([])
<commit_msg>Fix redshift server to default to xml.<commit_after>from __future__ import unicode_literals
import json
import sure # noqa
import moto.server as server
from moto import mock_redshift
'''
Test the different server responses
'''
@mock_redshift
def test_describe_clusters():
backend = server.create_backend_app("redshift")
test_client = backend.test_client()
res = test_client.get('/?Action=DescribeClusters')
result = res.data.decode("utf-8")
result.should.contain("<DescribeClustersResponse><DescribeClustersResult><Clusters></Clusters></DescribeClustersResult")
|
f7059eb02ee93bdd0f998acde385a04ac91c63df
|
sparrow.py
|
sparrow.py
|
#!/usr/bin/env python
from ConfigParser import SafeConfigParser
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
config_file_name = 'config.ini'
#SECURE YOUR CONFIG FILE - Don't put it in source code
parser = SafeConfigParser()
parser.read(config_file_name)
API_KEY = parser.get(config_file_name,'API_KEY') #AKA 'Consumer Key'
API_SECRET = parser.get(config_file_name,'API_SECRET') #AKA 'Consumer Secret'
ACCESS_TOKEN = parser.get(config_file_name,'ACCESS_TOKEN') #AKA 'OAUTH Token'
ACCESS_TOKEN_SECRET = parser.get(config_file_name,'ACCESS_TOKEN_SECRET') #AKA 'OAUTH Token Secret'
twitter = Twython(API_KEY, API_SECRET,
ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
|
#!/usr/bin/env python
import json
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
with open('creds.json') as f:
credentials = json.loads(f.read())
#SECURE YOUR CONFIG FILE - Don't put it in source code
twitter = Twython(credentials["consumer_key"],
credentials["consumer_secret"],
credentials["access_token_key"],
credentials["access_token_secret"])
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
|
Update method of loading creds
|
Update method of loading creds
Changed the way of loading credentials from config.ini file to a json credentials file.
|
Python
|
mit
|
fmcorey/sparrow,fmcorey/sparrow
|
#!/usr/bin/env python
from ConfigParser import SafeConfigParser
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
config_file_name = 'config.ini'
#SECURE YOUR CONFIG FILE - Don't put it in source code
parser = SafeConfigParser()
parser.read(config_file_name)
API_KEY = parser.get(config_file_name,'API_KEY') #AKA 'Consumer Key'
API_SECRET = parser.get(config_file_name,'API_SECRET') #AKA 'Consumer Secret'
ACCESS_TOKEN = parser.get(config_file_name,'ACCESS_TOKEN') #AKA 'OAUTH Token'
ACCESS_TOKEN_SECRET = parser.get(config_file_name,'ACCESS_TOKEN_SECRET') #AKA 'OAUTH Token Secret'
twitter = Twython(API_KEY, API_SECRET,
ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
Update method of loading creds
Changed the way of loading credentials from config.ini file to a json credentials file.
|
#!/usr/bin/env python
import json
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
with open('creds.json') as f:
credentials = json.loads(f.read())
#SECURE YOUR CONFIG FILE - Don't put it in source code
twitter = Twython(credentials["consumer_key"],
credentials["consumer_secret"],
credentials["access_token_key"],
credentials["access_token_secret"])
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
|
<commit_before>#!/usr/bin/env python
from ConfigParser import SafeConfigParser
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
config_file_name = 'config.ini'
#SECURE YOUR CONFIG FILE - Don't put it in source code
parser = SafeConfigParser()
parser.read(config_file_name)
API_KEY = parser.get(config_file_name,'API_KEY') #AKA 'Consumer Key'
API_SECRET = parser.get(config_file_name,'API_SECRET') #AKA 'Consumer Secret'
ACCESS_TOKEN = parser.get(config_file_name,'ACCESS_TOKEN') #AKA 'OAUTH Token'
ACCESS_TOKEN_SECRET = parser.get(config_file_name,'ACCESS_TOKEN_SECRET') #AKA 'OAUTH Token Secret'
twitter = Twython(API_KEY, API_SECRET,
ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
<commit_msg>Update method of loading creds
Changed the way of loading credentials from config.ini file to a json credentials file.<commit_after>
|
#!/usr/bin/env python
import json
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
with open('creds.json') as f:
credentials = json.loads(f.read())
#SECURE YOUR CONFIG FILE - Don't put it in source code
twitter = Twython(credentials["consumer_key"],
credentials["consumer_secret"],
credentials["access_token_key"],
credentials["access_token_secret"])
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
|
#!/usr/bin/env python
from ConfigParser import SafeConfigParser
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
config_file_name = 'config.ini'
#SECURE YOUR CONFIG FILE - Don't put it in source code
parser = SafeConfigParser()
parser.read(config_file_name)
API_KEY = parser.get(config_file_name,'API_KEY') #AKA 'Consumer Key'
API_SECRET = parser.get(config_file_name,'API_SECRET') #AKA 'Consumer Secret'
ACCESS_TOKEN = parser.get(config_file_name,'ACCESS_TOKEN') #AKA 'OAUTH Token'
ACCESS_TOKEN_SECRET = parser.get(config_file_name,'ACCESS_TOKEN_SECRET') #AKA 'OAUTH Token Secret'
twitter = Twython(API_KEY, API_SECRET,
ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
Update method of loading creds
Changed the way of loading credentials from config.ini file to a json credentials file.#!/usr/bin/env python
import json
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
with open('creds.json') as f:
credentials = json.loads(f.read())
#SECURE YOUR CONFIG FILE - Don't put it in source code
twitter = Twython(credentials["consumer_key"],
credentials["consumer_secret"],
credentials["access_token_key"],
credentials["access_token_secret"])
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
|
<commit_before>#!/usr/bin/env python
from ConfigParser import SafeConfigParser
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
config_file_name = 'config.ini'
#SECURE YOUR CONFIG FILE - Don't put it in source code
parser = SafeConfigParser()
parser.read(config_file_name)
API_KEY = parser.get(config_file_name,'API_KEY') #AKA 'Consumer Key'
API_SECRET = parser.get(config_file_name,'API_SECRET') #AKA 'Consumer Secret'
ACCESS_TOKEN = parser.get(config_file_name,'ACCESS_TOKEN') #AKA 'OAUTH Token'
ACCESS_TOKEN_SECRET = parser.get(config_file_name,'ACCESS_TOKEN_SECRET') #AKA 'OAUTH Token Secret'
twitter = Twython(API_KEY, API_SECRET,
ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
<commit_msg>Update method of loading creds
Changed the way of loading credentials from config.ini file to a json credentials file.<commit_after>#!/usr/bin/env python
import json
from twython import Twython
#These values are all pulled from a file called 'config.ini'
#You can call yours myawesomebotconfig.ini or whatever else!
#Just remember to change it here
with open('creds.json') as f:
credentials = json.loads(f.read())
#SECURE YOUR CONFIG FILE - Don't put it in source code
twitter = Twython(credentials["consumer_key"],
credentials["consumer_secret"],
credentials["access_token_key"],
credentials["access_token_secret"])
def send_tweet(tweet_text):
twitter.update_status(status = tweet_text)
send_tweet("This is my first tweet with Sparrow by @fmcorey - https://github.com/fmcorey/sparrow")
|
c5d656cff3e7ac218cc41805dfb8c19f63cd4250
|
run_server.py
|
run_server.py
|
#!/usr/bin/env python3
from shorter.web import app
if __name__ == "__main__":
app.run()
|
#!/usr/bin/env python3
from shorter.database import (
User,
db_session,
)
from shorter.web import app
if __name__ == "__main__":
# makes testing easier
test_user_created = db_session.query(User).filter_by(
username='jimmy').one_or_none()
if not test_user_created:
db_session.add(
User(username='jimmy', password='secret'))
db_session.commit()
app.run()
|
Create a testing user on starting the server
|
Create a testing user on starting the server
|
Python
|
agpl-3.0
|
mapleoin/shorter
|
#!/usr/bin/env python3
from shorter.web import app
if __name__ == "__main__":
app.run()
Create a testing user on starting the server
|
#!/usr/bin/env python3
from shorter.database import (
User,
db_session,
)
from shorter.web import app
if __name__ == "__main__":
# makes testing easier
test_user_created = db_session.query(User).filter_by(
username='jimmy').one_or_none()
if not test_user_created:
db_session.add(
User(username='jimmy', password='secret'))
db_session.commit()
app.run()
|
<commit_before>#!/usr/bin/env python3
from shorter.web import app
if __name__ == "__main__":
app.run()
<commit_msg>Create a testing user on starting the server<commit_after>
|
#!/usr/bin/env python3
from shorter.database import (
User,
db_session,
)
from shorter.web import app
if __name__ == "__main__":
# makes testing easier
test_user_created = db_session.query(User).filter_by(
username='jimmy').one_or_none()
if not test_user_created:
db_session.add(
User(username='jimmy', password='secret'))
db_session.commit()
app.run()
|
#!/usr/bin/env python3
from shorter.web import app
if __name__ == "__main__":
app.run()
Create a testing user on starting the server#!/usr/bin/env python3
from shorter.database import (
User,
db_session,
)
from shorter.web import app
if __name__ == "__main__":
# makes testing easier
test_user_created = db_session.query(User).filter_by(
username='jimmy').one_or_none()
if not test_user_created:
db_session.add(
User(username='jimmy', password='secret'))
db_session.commit()
app.run()
|
<commit_before>#!/usr/bin/env python3
from shorter.web import app
if __name__ == "__main__":
app.run()
<commit_msg>Create a testing user on starting the server<commit_after>#!/usr/bin/env python3
from shorter.database import (
User,
db_session,
)
from shorter.web import app
if __name__ == "__main__":
# makes testing easier
test_user_created = db_session.query(User).filter_by(
username='jimmy').one_or_none()
if not test_user_created:
db_session.add(
User(username='jimmy', password='secret'))
db_session.commit()
app.run()
|
393f65b78e5808c9d71b38a108cb85f6d31886c1
|
helpers/text.py
|
helpers/text.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def urlify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
Rename function from urlify to slugify
|
Rename function from urlify to slugify
|
Python
|
mit
|
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def urlify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
Rename function from urlify to slugify
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def urlify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
<commit_msg>Rename function from urlify to slugify<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def urlify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
Rename function from urlify to slugify#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def urlify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
<commit_msg>Rename function from urlify to slugify<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
108c696d032462ac3cdc00e45ead09136e80634a
|
tests/foomodulegen-auto.py
|
tests/foomodulegen-auto.py
|
#! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
if __name__ == '__main__':
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
|
#! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
def main():
if sys.argv[1] == '-d':
del sys.argv[1]
import pdb
pdb.set_trace()
my_module_gen()
else:
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
if __name__ == '__main__':
main()
|
Add a debug switch (-d) to enable debugger
|
Add a debug switch (-d) to enable debugger
|
Python
|
lgpl-2.1
|
gjcarneiro/pybindgen,cawka/pybindgen-old,ftalbrecht/pybindgen,gjcarneiro/pybindgen,cawka/pybindgen-old,ftalbrecht/pybindgen,gjcarneiro/pybindgen,ftalbrecht/pybindgen,cawka/pybindgen-old,gjcarneiro/pybindgen,ftalbrecht/pybindgen,cawka/pybindgen-old
|
#! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
if __name__ == '__main__':
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
Add a debug switch (-d) to enable debugger
|
#! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
def main():
if sys.argv[1] == '-d':
del sys.argv[1]
import pdb
pdb.set_trace()
my_module_gen()
else:
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
if __name__ == '__main__':
main()
|
<commit_before>#! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
if __name__ == '__main__':
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
<commit_msg>Add a debug switch (-d) to enable debugger<commit_after>
|
#! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
def main():
if sys.argv[1] == '-d':
del sys.argv[1]
import pdb
pdb.set_trace()
my_module_gen()
else:
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
if __name__ == '__main__':
main()
|
#! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
if __name__ == '__main__':
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
Add a debug switch (-d) to enable debugger#! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
def main():
if sys.argv[1] == '-d':
del sys.argv[1]
import pdb
pdb.set_trace()
my_module_gen()
else:
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
if __name__ == '__main__':
main()
|
<commit_before>#! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
if __name__ == '__main__':
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
<commit_msg>Add a debug switch (-d) to enable debugger<commit_after>#! /usr/bin/env python
import sys
import re
import pybindgen
from pybindgen.typehandlers import base as typehandlers
from pybindgen import (ReturnValue, Parameter, Module, Function, FileCodeSink)
from pybindgen import (CppMethod, CppConstructor, CppClass, Enum)
from pybindgen.gccxmlparser import ModuleParser
from pybindgen.function import CustomFunctionWrapper
from pybindgen.cppmethod import CustomCppMethodWrapper
import foomodulegen_common
def my_module_gen():
out = FileCodeSink(sys.stdout)
pygen_file = open(sys.argv[2], "wt")
module_parser = ModuleParser('foo2', '::')
module = module_parser.parse([sys.argv[1]], includes=['"foo.h"'], pygen_sink=FileCodeSink(pygen_file))
pygen_file.close()
foomodulegen_common.customize_module(module)
module.generate(out)
def main():
if sys.argv[1] == '-d':
del sys.argv[1]
import pdb
pdb.set_trace()
my_module_gen()
else:
try:
import cProfile as profile
except ImportError:
my_module_gen()
else:
print >> sys.stderr, "** running under profiler"
profile.run('my_module_gen()', 'foomodulegen-auto.pstat')
if __name__ == '__main__':
main()
|
c7f0534554236b7336cfb79b2ec2badb6780706e
|
bucketeer/test/test_commit.py
|
bucketeer/test/test_commit.py
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main()
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main(buffer = True)
|
Add suppression of tested code stdout in tests
|
Add suppression of tested code stdout in tests
Add `buffer = True` option to unittest main method call, which
suppresses any printing the code being tested has. This makes for
cleaner test suite output.
|
Python
|
mit
|
mgarbacz/bucketeer
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main()
Add suppression of tested code stdout in tests
Add `buffer = True` option to unittest main method call, which
suppresses any printing the code being tested has. This makes for
cleaner test suite output.
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main(buffer = True)
|
<commit_before>import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add suppression of tested code stdout in tests
Add `buffer = True` option to unittest main method call, which
suppresses any printing the code being tested has. This makes for
cleaner test suite output.<commit_after>
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main(buffer = True)
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main()
Add suppression of tested code stdout in tests
Add `buffer = True` option to unittest main method call, which
suppresses any printing the code being tested has. This makes for
cleaner test suite output.import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main(buffer = True)
|
<commit_before>import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add suppression of tested code stdout in tests
Add `buffer = True` option to unittest main method call, which
suppresses any printing the code being tested has. This makes for
cleaner test suite output.<commit_after>import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
# Constants - TODO move to config file
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
connection = boto.connect_s3()
# Create a bucket to test on existing bucket
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
connection = boto.connect_s3()
# Remove all files uploaded to s3
bucket = connection.get_bucket(existing_bucket)
for s3_file in bucket.list():
bucket.delete_key(s3_file.key)
# Remove bucket created to test on existing bucket
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
def testNewFileUploadToExistingBucket(self):
result = commit.commit_to_s3(existing_bucket, test_dir)
self.assertTrue(result)
if __name__ == '__main__':
unittest.main(buffer = True)
|
aaafd23df800d41e4b16fd399015991b2e426dc5
|
tests/setup.py
|
tests/setup.py
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
import re
from setuptools import find_packages, setup
version = ''
with open('kafkatest/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name="kafkatest",
version=version,
description="Apache Kafka System Tests",
author="Apache Kafka",
platforms=["any"],
license="apache2.0",
packages=find_packages(),
include_package_data=True,
install_requires=["ducktape==0.3.10", "requests>=2.5.0"]
)
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
import re
from setuptools import find_packages, setup
version = ''
with open('kafkatest/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name="kafkatest",
version=version,
description="Apache Kafka System Tests",
author="Apache Kafka",
platforms=["any"],
license="apache2.0",
packages=find_packages(),
include_package_data=True,
install_requires=["ducktape==0.4.0", "requests>=2.5.0"]
)
|
Advance system test ducktape dependency from 0.3.10 to 0.4.0
|
MINOR: Advance system test ducktape dependency from 0.3.10 to 0.4.0
Previous version of ducktape was found to have a memory leak which caused occasional failures in nightly runs.
Author: Geoff Anderson <geoff@confluent.io>
Reviewers: Ewen Cheslack-Postava <ewen@confluent.io>
Closes #1165 from granders/minor-advance-ducktape-to-0.4.0
|
Python
|
apache-2.0
|
gf53520/kafka,MyPureCloud/kafka,geeag/kafka,lindong28/kafka,geeag/kafka,Ishiihara/kafka,Ishiihara/kafka,lindong28/kafka,rhauch/kafka,geeag/kafka,noslowerdna/kafka,rhauch/kafka,MyPureCloud/kafka,mihbor/kafka,airbnb/kafka,eribeiro/kafka,eribeiro/kafka,mihbor/kafka,Esquive/kafka,airbnb/kafka,guozhangwang/kafka,apache/kafka,KevinLiLu/kafka,sebadiaz/kafka,Ishiihara/kafka,ErikKringen/kafka,ijuma/kafka,TiVo/kafka,KevinLiLu/kafka,ijuma/kafka,Esquive/kafka,ErikKringen/kafka,sslavic/kafka,Chasego/kafka,ErikKringen/kafka,lindong28/kafka,mihbor/kafka,themarkypantz/kafka,eribeiro/kafka,rhauch/kafka,guozhangwang/kafka,sslavic/kafka,ErikKringen/kafka,ollie314/kafka,noslowerdna/kafka,TiVo/kafka,TiVo/kafka,apache/kafka,rhauch/kafka,richhaase/kafka,geeag/kafka,richhaase/kafka,themarkypantz/kafka,Chasego/kafka,sebadiaz/kafka,guozhangwang/kafka,mihbor/kafka,apache/kafka,sslavic/kafka,Chasego/kafka,ijuma/kafka,noslowerdna/kafka,Chasego/kafka,eribeiro/kafka,sslavic/kafka,MyPureCloud/kafka,KevinLiLu/kafka,airbnb/kafka,richhaase/kafka,richhaase/kafka,noslowerdna/kafka,apache/kafka,guozhangwang/kafka,ijuma/kafka,gf53520/kafka,lindong28/kafka,KevinLiLu/kafka,Esquive/kafka,ollie314/kafka,ollie314/kafka,ollie314/kafka,gf53520/kafka,MyPureCloud/kafka,themarkypantz/kafka,airbnb/kafka,TiVo/kafka,gf53520/kafka,sebadiaz/kafka,Ishiihara/kafka,Esquive/kafka,themarkypantz/kafka,sebadiaz/kafka
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
import re
from setuptools import find_packages, setup
version = ''
with open('kafkatest/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name="kafkatest",
version=version,
description="Apache Kafka System Tests",
author="Apache Kafka",
platforms=["any"],
license="apache2.0",
packages=find_packages(),
include_package_data=True,
install_requires=["ducktape==0.3.10", "requests>=2.5.0"]
)
MINOR: Advance system test ducktape dependency from 0.3.10 to 0.4.0
Previous version of ducktape was found to have a memory leak which caused occasional failures in nightly runs.
Author: Geoff Anderson <geoff@confluent.io>
Reviewers: Ewen Cheslack-Postava <ewen@confluent.io>
Closes #1165 from granders/minor-advance-ducktape-to-0.4.0
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
import re
from setuptools import find_packages, setup
version = ''
with open('kafkatest/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name="kafkatest",
version=version,
description="Apache Kafka System Tests",
author="Apache Kafka",
platforms=["any"],
license="apache2.0",
packages=find_packages(),
include_package_data=True,
install_requires=["ducktape==0.4.0", "requests>=2.5.0"]
)
|
<commit_before># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
import re
from setuptools import find_packages, setup
version = ''
with open('kafkatest/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name="kafkatest",
version=version,
description="Apache Kafka System Tests",
author="Apache Kafka",
platforms=["any"],
license="apache2.0",
packages=find_packages(),
include_package_data=True,
install_requires=["ducktape==0.3.10", "requests>=2.5.0"]
)
<commit_msg>MINOR: Advance system test ducktape dependency from 0.3.10 to 0.4.0
Previous version of ducktape was found to have a memory leak which caused occasional failures in nightly runs.
Author: Geoff Anderson <geoff@confluent.io>
Reviewers: Ewen Cheslack-Postava <ewen@confluent.io>
Closes #1165 from granders/minor-advance-ducktape-to-0.4.0<commit_after>
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
import re
from setuptools import find_packages, setup
version = ''
with open('kafkatest/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name="kafkatest",
version=version,
description="Apache Kafka System Tests",
author="Apache Kafka",
platforms=["any"],
license="apache2.0",
packages=find_packages(),
include_package_data=True,
install_requires=["ducktape==0.4.0", "requests>=2.5.0"]
)
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
import re
from setuptools import find_packages, setup
version = ''
with open('kafkatest/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name="kafkatest",
version=version,
description="Apache Kafka System Tests",
author="Apache Kafka",
platforms=["any"],
license="apache2.0",
packages=find_packages(),
include_package_data=True,
install_requires=["ducktape==0.3.10", "requests>=2.5.0"]
)
MINOR: Advance system test ducktape dependency from 0.3.10 to 0.4.0
Previous version of ducktape was found to have a memory leak which caused occasional failures in nightly runs.
Author: Geoff Anderson <geoff@confluent.io>
Reviewers: Ewen Cheslack-Postava <ewen@confluent.io>
Closes #1165 from granders/minor-advance-ducktape-to-0.4.0# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
import re
from setuptools import find_packages, setup
version = ''
with open('kafkatest/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name="kafkatest",
version=version,
description="Apache Kafka System Tests",
author="Apache Kafka",
platforms=["any"],
license="apache2.0",
packages=find_packages(),
include_package_data=True,
install_requires=["ducktape==0.4.0", "requests>=2.5.0"]
)
|
<commit_before># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
import re
from setuptools import find_packages, setup
version = ''
with open('kafkatest/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name="kafkatest",
version=version,
description="Apache Kafka System Tests",
author="Apache Kafka",
platforms=["any"],
license="apache2.0",
packages=find_packages(),
include_package_data=True,
install_requires=["ducktape==0.3.10", "requests>=2.5.0"]
)
<commit_msg>MINOR: Advance system test ducktape dependency from 0.3.10 to 0.4.0
Previous version of ducktape was found to have a memory leak which caused occasional failures in nightly runs.
Author: Geoff Anderson <geoff@confluent.io>
Reviewers: Ewen Cheslack-Postava <ewen@confluent.io>
Closes #1165 from granders/minor-advance-ducktape-to-0.4.0<commit_after># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# see kafka.server.KafkaConfig for additional details and defaults
import re
from setuptools import find_packages, setup
version = ''
with open('kafkatest/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
setup(name="kafkatest",
version=version,
description="Apache Kafka System Tests",
author="Apache Kafka",
platforms=["any"],
license="apache2.0",
packages=find_packages(),
include_package_data=True,
install_requires=["ducktape==0.4.0", "requests>=2.5.0"]
)
|
e50a3078c4feb8222e0f3b35ad19e9e9585d7ebd
|
sc2/bot_ai.py
|
sc2/bot_ai.py
|
from functools import partial
from .data import ActionResult
class BotAI(object):
def _prepare_start(self, client, game_info, game_data):
self._client = client
self._game_info = game_info
self._game_data = game_data
self.do = partial(self._client.actions, game_data=game_data)
@property
def game_info(self):
return self._game_info
@property
def enemy_start_locations(self):
return self._game_info.start_locations
async def can_place(self, building, position):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, [position])
return r[0] == ActionResult.Success
async def select_placement(self, building, positions):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, positions)
print(r)
exit("!!!!")
def _prepare_step(self, state):
self.units = state.units.owned
self.minerals = state.common.minerals
self.vespnene = state.common.vespene
def on_start(self):
pass
async def on_step(self, do, state, game_loop):
raise NotImplementedError
|
from functools import partial
from .data import ActionResult
class BotAI(object):
def _prepare_start(self, client, game_info, game_data):
self._client = client
self._game_info = game_info
self._game_data = game_data
self.do = partial(self._client.actions, game_data=game_data)
@property
def game_info(self):
return self._game_info
@property
def enemy_start_locations(self):
return self._game_info.start_locations
async def can_place(self, building, position):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, [position])
return r[0] == ActionResult.Success
async def select_placement(self, building, positions):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, positions)
print(r)
exit("!!!!")
def _prepare_step(self, state):
self.units = state.units.owned
self.minerals = state.common.minerals
self.vespene = state.common.vespene
def on_start(self):
pass
async def on_step(self, do, state, game_loop):
raise NotImplementedError
|
Prepare with vespenen gas count too
|
Prepare with vespenen gas count too
|
Python
|
mit
|
Dentosal/python-sc2
|
from functools import partial
from .data import ActionResult
class BotAI(object):
def _prepare_start(self, client, game_info, game_data):
self._client = client
self._game_info = game_info
self._game_data = game_data
self.do = partial(self._client.actions, game_data=game_data)
@property
def game_info(self):
return self._game_info
@property
def enemy_start_locations(self):
return self._game_info.start_locations
async def can_place(self, building, position):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, [position])
return r[0] == ActionResult.Success
async def select_placement(self, building, positions):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, positions)
print(r)
exit("!!!!")
def _prepare_step(self, state):
self.units = state.units.owned
self.minerals = state.common.minerals
self.vespnene = state.common.vespene
def on_start(self):
pass
async def on_step(self, do, state, game_loop):
raise NotImplementedError
Prepare with vespenen gas count too
|
from functools import partial
from .data import ActionResult
class BotAI(object):
def _prepare_start(self, client, game_info, game_data):
self._client = client
self._game_info = game_info
self._game_data = game_data
self.do = partial(self._client.actions, game_data=game_data)
@property
def game_info(self):
return self._game_info
@property
def enemy_start_locations(self):
return self._game_info.start_locations
async def can_place(self, building, position):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, [position])
return r[0] == ActionResult.Success
async def select_placement(self, building, positions):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, positions)
print(r)
exit("!!!!")
def _prepare_step(self, state):
self.units = state.units.owned
self.minerals = state.common.minerals
self.vespene = state.common.vespene
def on_start(self):
pass
async def on_step(self, do, state, game_loop):
raise NotImplementedError
|
<commit_before>from functools import partial
from .data import ActionResult
class BotAI(object):
def _prepare_start(self, client, game_info, game_data):
self._client = client
self._game_info = game_info
self._game_data = game_data
self.do = partial(self._client.actions, game_data=game_data)
@property
def game_info(self):
return self._game_info
@property
def enemy_start_locations(self):
return self._game_info.start_locations
async def can_place(self, building, position):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, [position])
return r[0] == ActionResult.Success
async def select_placement(self, building, positions):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, positions)
print(r)
exit("!!!!")
def _prepare_step(self, state):
self.units = state.units.owned
self.minerals = state.common.minerals
self.vespnene = state.common.vespene
def on_start(self):
pass
async def on_step(self, do, state, game_loop):
raise NotImplementedError
<commit_msg>Prepare with vespenen gas count too<commit_after>
|
from functools import partial
from .data import ActionResult
class BotAI(object):
def _prepare_start(self, client, game_info, game_data):
self._client = client
self._game_info = game_info
self._game_data = game_data
self.do = partial(self._client.actions, game_data=game_data)
@property
def game_info(self):
return self._game_info
@property
def enemy_start_locations(self):
return self._game_info.start_locations
async def can_place(self, building, position):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, [position])
return r[0] == ActionResult.Success
async def select_placement(self, building, positions):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, positions)
print(r)
exit("!!!!")
def _prepare_step(self, state):
self.units = state.units.owned
self.minerals = state.common.minerals
self.vespene = state.common.vespene
def on_start(self):
pass
async def on_step(self, do, state, game_loop):
raise NotImplementedError
|
from functools import partial
from .data import ActionResult
class BotAI(object):
def _prepare_start(self, client, game_info, game_data):
self._client = client
self._game_info = game_info
self._game_data = game_data
self.do = partial(self._client.actions, game_data=game_data)
@property
def game_info(self):
return self._game_info
@property
def enemy_start_locations(self):
return self._game_info.start_locations
async def can_place(self, building, position):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, [position])
return r[0] == ActionResult.Success
async def select_placement(self, building, positions):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, positions)
print(r)
exit("!!!!")
def _prepare_step(self, state):
self.units = state.units.owned
self.minerals = state.common.minerals
self.vespnene = state.common.vespene
def on_start(self):
pass
async def on_step(self, do, state, game_loop):
raise NotImplementedError
Prepare with vespenen gas count toofrom functools import partial
from .data import ActionResult
class BotAI(object):
def _prepare_start(self, client, game_info, game_data):
self._client = client
self._game_info = game_info
self._game_data = game_data
self.do = partial(self._client.actions, game_data=game_data)
@property
def game_info(self):
return self._game_info
@property
def enemy_start_locations(self):
return self._game_info.start_locations
async def can_place(self, building, position):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, [position])
return r[0] == ActionResult.Success
async def select_placement(self, building, positions):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, positions)
print(r)
exit("!!!!")
def _prepare_step(self, state):
self.units = state.units.owned
self.minerals = state.common.minerals
self.vespene = state.common.vespene
def on_start(self):
pass
async def on_step(self, do, state, game_loop):
raise NotImplementedError
|
<commit_before>from functools import partial
from .data import ActionResult
class BotAI(object):
def _prepare_start(self, client, game_info, game_data):
self._client = client
self._game_info = game_info
self._game_data = game_data
self.do = partial(self._client.actions, game_data=game_data)
@property
def game_info(self):
return self._game_info
@property
def enemy_start_locations(self):
return self._game_info.start_locations
async def can_place(self, building, position):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, [position])
return r[0] == ActionResult.Success
async def select_placement(self, building, positions):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, positions)
print(r)
exit("!!!!")
def _prepare_step(self, state):
self.units = state.units.owned
self.minerals = state.common.minerals
self.vespnene = state.common.vespene
def on_start(self):
pass
async def on_step(self, do, state, game_loop):
raise NotImplementedError
<commit_msg>Prepare with vespenen gas count too<commit_after>from functools import partial
from .data import ActionResult
class BotAI(object):
def _prepare_start(self, client, game_info, game_data):
self._client = client
self._game_info = game_info
self._game_data = game_data
self.do = partial(self._client.actions, game_data=game_data)
@property
def game_info(self):
return self._game_info
@property
def enemy_start_locations(self):
return self._game_info.start_locations
async def can_place(self, building, position):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, [position])
return r[0] == ActionResult.Success
async def select_placement(self, building, positions):
ability_id = self._game_data.find_ability_by_name(f"Build {building}").id
r = await self._client.query_building_placement(ability_id, positions)
print(r)
exit("!!!!")
def _prepare_step(self, state):
self.units = state.units.owned
self.minerals = state.common.minerals
self.vespene = state.common.vespene
def on_start(self):
pass
async def on_step(self, do, state, game_loop):
raise NotImplementedError
|
fa936770448e896211b592b742eee350633804e7
|
server/app.py
|
server/app.py
|
from flask import Flask, request
from werkzeug import secure_filename
import os
app = Flask(__name__)
UPLOAD_FOLDER = 'uploads/'
@app.route("/")
def hello():
return "Hello World!"
@app.route("/upload", methods=['POST'])
def upload_file():
file = request.files['sound']
print file.filename
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(debug=True)
|
from flask import Flask, request
from werkzeug import secure_filename
import os
app = Flask(__name__)
UPLOAD_FOLDER = 'uploads/'
@app.route("/")
def hello():
return "Hello World!"
@app.route("/upload", methods=['POST'])
def upload_file():
file = request.files['sound']
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(debug=True)
|
Check if uploads folder exists
|
Check if uploads folder exists
|
Python
|
mit
|
spb201/turbulent-octo-rutabaga-api,spb201/turbulent-octo-rutabaga-api,spb201/turbulent-octo-rutabaga-api
|
from flask import Flask, request
from werkzeug import secure_filename
import os
app = Flask(__name__)
UPLOAD_FOLDER = 'uploads/'
@app.route("/")
def hello():
return "Hello World!"
@app.route("/upload", methods=['POST'])
def upload_file():
file = request.files['sound']
print file.filename
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(debug=True)
Check if uploads folder exists
|
from flask import Flask, request
from werkzeug import secure_filename
import os
app = Flask(__name__)
UPLOAD_FOLDER = 'uploads/'
@app.route("/")
def hello():
return "Hello World!"
@app.route("/upload", methods=['POST'])
def upload_file():
file = request.files['sound']
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>from flask import Flask, request
from werkzeug import secure_filename
import os
app = Flask(__name__)
UPLOAD_FOLDER = 'uploads/'
@app.route("/")
def hello():
return "Hello World!"
@app.route("/upload", methods=['POST'])
def upload_file():
file = request.files['sound']
print file.filename
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Check if uploads folder exists<commit_after>
|
from flask import Flask, request
from werkzeug import secure_filename
import os
app = Flask(__name__)
UPLOAD_FOLDER = 'uploads/'
@app.route("/")
def hello():
return "Hello World!"
@app.route("/upload", methods=['POST'])
def upload_file():
file = request.files['sound']
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(debug=True)
|
from flask import Flask, request
from werkzeug import secure_filename
import os
app = Flask(__name__)
UPLOAD_FOLDER = 'uploads/'
@app.route("/")
def hello():
return "Hello World!"
@app.route("/upload", methods=['POST'])
def upload_file():
file = request.files['sound']
print file.filename
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(debug=True)
Check if uploads folder existsfrom flask import Flask, request
from werkzeug import secure_filename
import os
app = Flask(__name__)
UPLOAD_FOLDER = 'uploads/'
@app.route("/")
def hello():
return "Hello World!"
@app.route("/upload", methods=['POST'])
def upload_file():
file = request.files['sound']
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(debug=True)
|
<commit_before>from flask import Flask, request
from werkzeug import secure_filename
import os
app = Flask(__name__)
UPLOAD_FOLDER = 'uploads/'
@app.route("/")
def hello():
return "Hello World!"
@app.route("/upload", methods=['POST'])
def upload_file():
file = request.files['sound']
print file.filename
if file:
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(debug=True)
<commit_msg>Check if uploads folder exists<commit_after>from flask import Flask, request
from werkzeug import secure_filename
import os
app = Flask(__name__)
UPLOAD_FOLDER = 'uploads/'
@app.route("/")
def hello():
return "Hello World!"
@app.route("/upload", methods=['POST'])
def upload_file():
file = request.files['sound']
if file:
if not os.path.isdir(UPLOAD_FOLDER):
os.mkdir(UPLOAD_FOLDER)
filename = secure_filename(file.filename)
file.save(os.path.join(UPLOAD_FOLDER, filename))
return filename
if __name__ == "__main__":
app.run(debug=True)
|
209a0c8ab878d9f455d117314b8945cfdd367857
|
test/scripts/run_all_tests.py
|
test/scripts/run_all_tests.py
|
"""Runs the PyExtTest project against all installed python instances."""
import sys, subprocess, python_installations
if __name__ == '__main__':
num_failed_tests = 0
for installation in python_installations.get_python_installations():
# Only test against official CPython installations.
if installation.company != "PythonCore":
print("Skipping", installation, end="\n\n", flush=True)
continue
# Also skip versions before 2.7 since they don't have symbols.
version = tuple(int(n) for n in installation.sys_version.split("."))
if version < (2, 7):
print("Skipping", installation, end="\n\n", flush=True)
continue
# Create the dump files.
print("Creating dump files with python executable:", installation.exec_path, flush=True)
subprocess.check_call([installation.exec_path, "object_types.py"])
subprocess.check_call([installation.exec_path, "fibonacci_test.py"])
# Run the tests against the dump files.
py_ext_test_exe = sys.argv[1] if len(sys.argv) > 1 else "../../x64/Debug/PyExtTest.exe"
num_failed_tests += subprocess.call(py_ext_test_exe)
sys.exit(num_failed_tests)
|
"""Runs the PyExtTest project against all installed python instances."""
import sys, subprocess, python_installations
if __name__ == '__main__':
num_failed_tests = 0
for installation in python_installations.get_python_installations():
# Skip versions with no executable.
if installation.exec_path == None:
print("Skipping (no executable)", installation, end="\n\n", flush=True)
continue
# Only test against official CPython installations.
if installation.company != "PythonCore":
print("Skipping (PythonCore)", installation, end="\n\n", flush=True)
continue
# Also skip versions before 2.7 since they don't have symbols.
version = tuple(int(n) for n in installation.sys_version.split("."))
if version < (2, 7):
print("Skipping (too old)", installation, end="\n\n", flush=True)
continue
# Create the dump files.
print("Creating dump files with python executable:", installation.exec_path, flush=True)
subprocess.check_call([installation.exec_path, "object_types.py"])
subprocess.check_call([installation.exec_path, "fibonacci_test.py"])
# Run the tests against the dump files.
py_ext_test_exe = sys.argv[1] if len(sys.argv) > 1 else "../../x64/Debug/PyExtTest.exe"
num_failed_tests += subprocess.call(py_ext_test_exe)
sys.exit(num_failed_tests)
|
Make the test runner more resilient to weird registry configurations.
|
Make the test runner more resilient to weird registry configurations.
|
Python
|
mit
|
SeanCline/PyExt,SeanCline/PyExt,SeanCline/PyExt
|
"""Runs the PyExtTest project against all installed python instances."""
import sys, subprocess, python_installations
if __name__ == '__main__':
num_failed_tests = 0
for installation in python_installations.get_python_installations():
# Only test against official CPython installations.
if installation.company != "PythonCore":
print("Skipping", installation, end="\n\n", flush=True)
continue
# Also skip versions before 2.7 since they don't have symbols.
version = tuple(int(n) for n in installation.sys_version.split("."))
if version < (2, 7):
print("Skipping", installation, end="\n\n", flush=True)
continue
# Create the dump files.
print("Creating dump files with python executable:", installation.exec_path, flush=True)
subprocess.check_call([installation.exec_path, "object_types.py"])
subprocess.check_call([installation.exec_path, "fibonacci_test.py"])
# Run the tests against the dump files.
py_ext_test_exe = sys.argv[1] if len(sys.argv) > 1 else "../../x64/Debug/PyExtTest.exe"
num_failed_tests += subprocess.call(py_ext_test_exe)
sys.exit(num_failed_tests)Make the test runner more resilient to weird registry configurations.
|
"""Runs the PyExtTest project against all installed python instances."""
import sys, subprocess, python_installations
if __name__ == '__main__':
num_failed_tests = 0
for installation in python_installations.get_python_installations():
# Skip versions with no executable.
if installation.exec_path == None:
print("Skipping (no executable)", installation, end="\n\n", flush=True)
continue
# Only test against official CPython installations.
if installation.company != "PythonCore":
print("Skipping (PythonCore)", installation, end="\n\n", flush=True)
continue
# Also skip versions before 2.7 since they don't have symbols.
version = tuple(int(n) for n in installation.sys_version.split("."))
if version < (2, 7):
print("Skipping (too old)", installation, end="\n\n", flush=True)
continue
# Create the dump files.
print("Creating dump files with python executable:", installation.exec_path, flush=True)
subprocess.check_call([installation.exec_path, "object_types.py"])
subprocess.check_call([installation.exec_path, "fibonacci_test.py"])
# Run the tests against the dump files.
py_ext_test_exe = sys.argv[1] if len(sys.argv) > 1 else "../../x64/Debug/PyExtTest.exe"
num_failed_tests += subprocess.call(py_ext_test_exe)
sys.exit(num_failed_tests)
|
<commit_before>"""Runs the PyExtTest project against all installed python instances."""
import sys, subprocess, python_installations
if __name__ == '__main__':
num_failed_tests = 0
for installation in python_installations.get_python_installations():
# Only test against official CPython installations.
if installation.company != "PythonCore":
print("Skipping", installation, end="\n\n", flush=True)
continue
# Also skip versions before 2.7 since they don't have symbols.
version = tuple(int(n) for n in installation.sys_version.split("."))
if version < (2, 7):
print("Skipping", installation, end="\n\n", flush=True)
continue
# Create the dump files.
print("Creating dump files with python executable:", installation.exec_path, flush=True)
subprocess.check_call([installation.exec_path, "object_types.py"])
subprocess.check_call([installation.exec_path, "fibonacci_test.py"])
# Run the tests against the dump files.
py_ext_test_exe = sys.argv[1] if len(sys.argv) > 1 else "../../x64/Debug/PyExtTest.exe"
num_failed_tests += subprocess.call(py_ext_test_exe)
sys.exit(num_failed_tests)<commit_msg>Make the test runner more resilient to weird registry configurations.<commit_after>
|
"""Runs the PyExtTest project against all installed python instances."""
import sys, subprocess, python_installations
if __name__ == '__main__':
num_failed_tests = 0
for installation in python_installations.get_python_installations():
# Skip versions with no executable.
if installation.exec_path == None:
print("Skipping (no executable)", installation, end="\n\n", flush=True)
continue
# Only test against official CPython installations.
if installation.company != "PythonCore":
print("Skipping (PythonCore)", installation, end="\n\n", flush=True)
continue
# Also skip versions before 2.7 since they don't have symbols.
version = tuple(int(n) for n in installation.sys_version.split("."))
if version < (2, 7):
print("Skipping (too old)", installation, end="\n\n", flush=True)
continue
# Create the dump files.
print("Creating dump files with python executable:", installation.exec_path, flush=True)
subprocess.check_call([installation.exec_path, "object_types.py"])
subprocess.check_call([installation.exec_path, "fibonacci_test.py"])
# Run the tests against the dump files.
py_ext_test_exe = sys.argv[1] if len(sys.argv) > 1 else "../../x64/Debug/PyExtTest.exe"
num_failed_tests += subprocess.call(py_ext_test_exe)
sys.exit(num_failed_tests)
|
"""Runs the PyExtTest project against all installed python instances."""
import sys, subprocess, python_installations
if __name__ == '__main__':
num_failed_tests = 0
for installation in python_installations.get_python_installations():
# Only test against official CPython installations.
if installation.company != "PythonCore":
print("Skipping", installation, end="\n\n", flush=True)
continue
# Also skip versions before 2.7 since they don't have symbols.
version = tuple(int(n) for n in installation.sys_version.split("."))
if version < (2, 7):
print("Skipping", installation, end="\n\n", flush=True)
continue
# Create the dump files.
print("Creating dump files with python executable:", installation.exec_path, flush=True)
subprocess.check_call([installation.exec_path, "object_types.py"])
subprocess.check_call([installation.exec_path, "fibonacci_test.py"])
# Run the tests against the dump files.
py_ext_test_exe = sys.argv[1] if len(sys.argv) > 1 else "../../x64/Debug/PyExtTest.exe"
num_failed_tests += subprocess.call(py_ext_test_exe)
sys.exit(num_failed_tests)Make the test runner more resilient to weird registry configurations."""Runs the PyExtTest project against all installed python instances."""
import sys, subprocess, python_installations
if __name__ == '__main__':
num_failed_tests = 0
for installation in python_installations.get_python_installations():
# Skip versions with no executable.
if installation.exec_path == None:
print("Skipping (no executable)", installation, end="\n\n", flush=True)
continue
# Only test against official CPython installations.
if installation.company != "PythonCore":
print("Skipping (PythonCore)", installation, end="\n\n", flush=True)
continue
# Also skip versions before 2.7 since they don't have symbols.
version = tuple(int(n) for n in installation.sys_version.split("."))
if version < (2, 7):
print("Skipping (too old)", installation, end="\n\n", flush=True)
continue
# Create the dump files.
print("Creating dump files with python executable:", installation.exec_path, flush=True)
subprocess.check_call([installation.exec_path, "object_types.py"])
subprocess.check_call([installation.exec_path, "fibonacci_test.py"])
# Run the tests against the dump files.
py_ext_test_exe = sys.argv[1] if len(sys.argv) > 1 else "../../x64/Debug/PyExtTest.exe"
num_failed_tests += subprocess.call(py_ext_test_exe)
sys.exit(num_failed_tests)
|
<commit_before>"""Runs the PyExtTest project against all installed python instances."""
import sys, subprocess, python_installations
if __name__ == '__main__':
num_failed_tests = 0
for installation in python_installations.get_python_installations():
# Only test against official CPython installations.
if installation.company != "PythonCore":
print("Skipping", installation, end="\n\n", flush=True)
continue
# Also skip versions before 2.7 since they don't have symbols.
version = tuple(int(n) for n in installation.sys_version.split("."))
if version < (2, 7):
print("Skipping", installation, end="\n\n", flush=True)
continue
# Create the dump files.
print("Creating dump files with python executable:", installation.exec_path, flush=True)
subprocess.check_call([installation.exec_path, "object_types.py"])
subprocess.check_call([installation.exec_path, "fibonacci_test.py"])
# Run the tests against the dump files.
py_ext_test_exe = sys.argv[1] if len(sys.argv) > 1 else "../../x64/Debug/PyExtTest.exe"
num_failed_tests += subprocess.call(py_ext_test_exe)
sys.exit(num_failed_tests)<commit_msg>Make the test runner more resilient to weird registry configurations.<commit_after>"""Runs the PyExtTest project against all installed python instances."""
import sys, subprocess, python_installations
if __name__ == '__main__':
num_failed_tests = 0
for installation in python_installations.get_python_installations():
# Skip versions with no executable.
if installation.exec_path == None:
print("Skipping (no executable)", installation, end="\n\n", flush=True)
continue
# Only test against official CPython installations.
if installation.company != "PythonCore":
print("Skipping (PythonCore)", installation, end="\n\n", flush=True)
continue
# Also skip versions before 2.7 since they don't have symbols.
version = tuple(int(n) for n in installation.sys_version.split("."))
if version < (2, 7):
print("Skipping (too old)", installation, end="\n\n", flush=True)
continue
# Create the dump files.
print("Creating dump files with python executable:", installation.exec_path, flush=True)
subprocess.check_call([installation.exec_path, "object_types.py"])
subprocess.check_call([installation.exec_path, "fibonacci_test.py"])
# Run the tests against the dump files.
py_ext_test_exe = sys.argv[1] if len(sys.argv) > 1 else "../../x64/Debug/PyExtTest.exe"
num_failed_tests += subprocess.call(py_ext_test_exe)
sys.exit(num_failed_tests)
|
4387a8a38664abe86f0ff9d531ab3ba937f9adf7
|
tests/unit/test_main_views.py
|
tests/unit/test_main_views.py
|
import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def setup(self):
self.patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
def test_page_load(self, db_session):
db_session.add(self.patient1)
db_session.commit()
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
|
import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def test_setup(self, db_session):
patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
user = User()
consultant = User()
meeting = Meeting()
db_session.add(patient1)
db_session.commit()
def test_page_load(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
def test_kept_in_db(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
assert req_pass.status_code == 200
|
Add Unit tests for views
|
Add Unit tests for views
|
Python
|
mit
|
stefpiatek/mdt-flask-app,stefpiatek/mdt-flask-app
|
import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def setup(self):
self.patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
def test_page_load(self, db_session):
db_session.add(self.patient1)
db_session.commit()
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
Add Unit tests for views
|
import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def test_setup(self, db_session):
patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
user = User()
consultant = User()
meeting = Meeting()
db_session.add(patient1)
db_session.commit()
def test_page_load(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
def test_kept_in_db(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
assert req_pass.status_code == 200
|
<commit_before>import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def setup(self):
self.patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
def test_page_load(self, db_session):
db_session.add(self.patient1)
db_session.commit()
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
<commit_msg>Add Unit tests for views<commit_after>
|
import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def test_setup(self, db_session):
patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
user = User()
consultant = User()
meeting = Meeting()
db_session.add(patient1)
db_session.commit()
def test_page_load(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
def test_kept_in_db(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
assert req_pass.status_code == 200
|
import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def setup(self):
self.patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
def test_page_load(self, db_session):
db_session.add(self.patient1)
db_session.commit()
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
Add Unit tests for viewsimport pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def test_setup(self, db_session):
patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
user = User()
consultant = User()
meeting = Meeting()
db_session.add(patient1)
db_session.commit()
def test_page_load(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
def test_kept_in_db(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
assert req_pass.status_code == 200
|
<commit_before>import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def setup(self):
self.patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
def test_page_load(self, db_session):
db_session.add(self.patient1)
db_session.commit()
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
<commit_msg>Add Unit tests for views<commit_after>import pytest
from flask import url_for
from pytest_flask import fixtures
from mdt_app.models import *
@pytest.mark.usefixtures('client_class')
class TestIndex:
def test_page_load(self):
assert self.client.get(url_for('main.index')).status_code == 200
@pytest.mark.usefixtures('client_class')
class TestCaseCreate:
def test_setup(self, db_session):
patient1 = Patient(id=1, hospital_number=12345678,
first_name='test1', last_name='patient',
date_of_birth='1988-10-09', sex='F')
user = User()
consultant = User()
meeting = Meeting()
db_session.add(patient1)
db_session.commit()
def test_page_load(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
req_no_id = self.client.get(url_for('main.case_create', patient_id=''))
assert req_pass.status_code == 200
assert req_no_id.status_code == 404, 'no id, page not found'
def test_kept_in_db(self):
req_pass = self.client.get(url_for('main.case_create', patient_id=1))
assert req_pass.status_code == 200
|
a7a1d513003a65c5c9772ba75631247decff444d
|
utils/utils.py
|
utils/utils.py
|
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
current_site = get_current_site(request)
return add_domain(current_site.domain, path, request.is_secure())
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
|
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
"""Retrieve current site site
Always returns as http (never https)
"""
current_site = get_current_site(request)
site_url = add_domain(current_site.domain, path, request.is_secure())
return site_url.replace('https', 'http')
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
|
Make site url be http, not https
|
Make site url be http, not https
|
Python
|
bsd-3-clause
|
uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam
|
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
current_site = get_current_site(request)
return add_domain(current_site.domain, path, request.is_secure())
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
Make site url be http, not https
|
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
"""Retrieve current site site
Always returns as http (never https)
"""
current_site = get_current_site(request)
site_url = add_domain(current_site.domain, path, request.is_secure())
return site_url.replace('https', 'http')
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
|
<commit_before>from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
current_site = get_current_site(request)
return add_domain(current_site.domain, path, request.is_secure())
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
<commit_msg>Make site url be http, not https<commit_after>
|
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
"""Retrieve current site site
Always returns as http (never https)
"""
current_site = get_current_site(request)
site_url = add_domain(current_site.domain, path, request.is_secure())
return site_url.replace('https', 'http')
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
|
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
current_site = get_current_site(request)
return add_domain(current_site.domain, path, request.is_secure())
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
Make site url be http, not httpsfrom django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
"""Retrieve current site site
Always returns as http (never https)
"""
current_site = get_current_site(request)
site_url = add_domain(current_site.domain, path, request.is_secure())
return site_url.replace('https', 'http')
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
|
<commit_before>from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
current_site = get_current_site(request)
return add_domain(current_site.domain, path, request.is_secure())
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
<commit_msg>Make site url be http, not https<commit_after>from django.core.paginator import Paginator, EmptyPage, InvalidPage
from django.contrib.syndication.views import add_domain
from django.contrib.sites.models import get_current_site
def get_site_url(request, path):
"""Retrieve current site site
Always returns as http (never https)
"""
current_site = get_current_site(request)
site_url = add_domain(current_site.domain, path, request.is_secure())
return site_url.replace('https', 'http')
def do_paging(request, queryset):
paginator = Paginator(queryset, 25)
# Make sure page request is an int. If not, deliver first page.
try:
page = int(request.GET.get('page', '1'))
except ValueError:
page = 1
# If page request (9999) is out of range, deliver last page of results.
try:
objects = paginator.page(page)
except (EmptyPage, InvalidPage):
objects = paginator.page(paginator.num_pages)
return objects
|
bf82a1437d89f82d417b72cca6274dc35ac7d147
|
example/urls.py
|
example/urls.py
|
from django.conf import settings
from django.conf.urls import static
from django.contrib import admin
from django.urls import path
admin.autodiscover()
urlpatterns = [
path('admin/', admin.site.urls),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls import static
from django.contrib import admin
from django.urls import path
admin.autodiscover()
urlpatterns = [
path("admin/", admin.site.urls),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Fix correction to comply with black
|
Fix correction to comply with black
|
Python
|
bsd-3-clause
|
jonasundderwolf/django-localizedfields,jonasundderwolf/django-localizedfields
|
from django.conf import settings
from django.conf.urls import static
from django.contrib import admin
from django.urls import path
admin.autodiscover()
urlpatterns = [
path('admin/', admin.site.urls),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Fix correction to comply with black
|
from django.conf import settings
from django.conf.urls import static
from django.contrib import admin
from django.urls import path
admin.autodiscover()
urlpatterns = [
path("admin/", admin.site.urls),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<commit_before>from django.conf import settings
from django.conf.urls import static
from django.contrib import admin
from django.urls import path
admin.autodiscover()
urlpatterns = [
path('admin/', admin.site.urls),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Fix correction to comply with black<commit_after>
|
from django.conf import settings
from django.conf.urls import static
from django.contrib import admin
from django.urls import path
admin.autodiscover()
urlpatterns = [
path("admin/", admin.site.urls),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls import static
from django.contrib import admin
from django.urls import path
admin.autodiscover()
urlpatterns = [
path('admin/', admin.site.urls),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Fix correction to comply with blackfrom django.conf import settings
from django.conf.urls import static
from django.contrib import admin
from django.urls import path
admin.autodiscover()
urlpatterns = [
path("admin/", admin.site.urls),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
<commit_before>from django.conf import settings
from django.conf.urls import static
from django.contrib import admin
from django.urls import path
admin.autodiscover()
urlpatterns = [
path('admin/', admin.site.urls),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Fix correction to comply with black<commit_after>from django.conf import settings
from django.conf.urls import static
from django.contrib import admin
from django.urls import path
admin.autodiscover()
urlpatterns = [
path("admin/", admin.site.urls),
]
urlpatterns += static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
6d635a94121a9038c7c5b80b9851a086e69728b6
|
scripts/wiggle_to_binned_array.py
|
scripts/wiggle_to_binned_array.py
|
#!/usr/bin/env python
"""
usage: %prog score_file out_file
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def read_scores( f ):
scores_by_chrom = dict()
return scores_by_chrom
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
|
#!/usr/bin/env python
"""
usage: %prog score_file out_file
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
if options.comp:
comp_type = options.comp
else:
comp_type = None
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
if comp_type:
scores.to_file( out, comp_type=comp_type )
else:
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
|
Allow specifying compression type on command line.
|
Allow specifying compression type on command line.
|
Python
|
mit
|
uhjish/bx-python,uhjish/bx-python,uhjish/bx-python
|
#!/usr/bin/env python
"""
usage: %prog score_file out_file
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def read_scores( f ):
scores_by_chrom = dict()
return scores_by_chrom
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
Allow specifying compression type on command line.
|
#!/usr/bin/env python
"""
usage: %prog score_file out_file
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
if options.comp:
comp_type = options.comp
else:
comp_type = None
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
if comp_type:
scores.to_file( out, comp_type=comp_type )
else:
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
|
<commit_before>#!/usr/bin/env python
"""
usage: %prog score_file out_file
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def read_scores( f ):
scores_by_chrom = dict()
return scores_by_chrom
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
<commit_msg>Allow specifying compression type on command line.<commit_after>
|
#!/usr/bin/env python
"""
usage: %prog score_file out_file
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
if options.comp:
comp_type = options.comp
else:
comp_type = None
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
if comp_type:
scores.to_file( out, comp_type=comp_type )
else:
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
|
#!/usr/bin/env python
"""
usage: %prog score_file out_file
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def read_scores( f ):
scores_by_chrom = dict()
return scores_by_chrom
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
Allow specifying compression type on command line.#!/usr/bin/env python
"""
usage: %prog score_file out_file
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
if options.comp:
comp_type = options.comp
else:
comp_type = None
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
if comp_type:
scores.to_file( out, comp_type=comp_type )
else:
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
|
<commit_before>#!/usr/bin/env python
"""
usage: %prog score_file out_file
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def read_scores( f ):
scores_by_chrom = dict()
return scores_by_chrom
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
<commit_msg>Allow specifying compression type on command line.<commit_after>#!/usr/bin/env python
"""
usage: %prog score_file out_file
-c, --comp=type: compression type (none, zlib, lzo)
"""
from __future__ import division
import sys
import psyco_full
import bx.wiggle
from bx.binned_array import BinnedArray
from fpconst import isNaN
import cookbook.doc_optparse
import misc
def main():
# Parse command line
options, args = cookbook.doc_optparse.parse( __doc__ )
try:
if options.comp:
comp_type = options.comp
else:
comp_type = None
score_fname = args[0]
out_fname = args[1]
except:
cookbook.doc_optparse.exit()
scores = BinnedArray()
## last_chrom = None
for i, ( chrom, pos, val ) in enumerate( bx.wiggle.Reader( misc.open_compressed( score_fname ) ) ):
#if last_chrom is None:
# last_chrom = chrom
#else:
# assert chrom == last_chrom, "This script expects a 'wiggle' input on only one chromosome"
scores[pos] = val
# Status
if i % 10000 == 0: print i, "scores processed"
out = open( out_fname, "w" )
if comp_type:
scores.to_file( out, comp_type=comp_type )
else:
scores.to_file( out )
out.close()
if __name__ == "__main__": main()
|
c3958b4aa7aca75f00f4de20e612f918a176ec96
|
extraction/api/validation.py
|
extraction/api/validation.py
|
from yapsy import IPlugin
from postprocessing import PostProcessedDataItem
__author__ = 'aj@springlab.co'
class ValidatedDataItem(PostProcessedDataItem):
"""
Overrides the PostProcessedDataItem class to provide an indication that a
PostProcessedDataItem instance has undergone validation
"""
def __init__(self, seq=None, **kwargs):
self.valid = False
super(ValidatedDataItem, self).__init__(seq, **kwargs)
class ValidationPluginInterface(IPlugin.IPlugin):
"""
Defines an interface for a plugin that validates the processed data items
"""
def can_validate(self, data_model_name, data_model):
"""
Determines whether the plugin can validate data associated with a given data model. Returns a bool.
"""
return False
def validate(self, data_items, data_model_name, data_model):
"""
For a given data model, processes a list of (UID value, ExtractedDataItem instance) tuples and validates
each ExtractedDataItem instance, transforming it into a ValidatedDataItem instance in the process.
Returns a list of (UID value, ValidatedDataItem instance) tuples.
"""
return []
|
from yapsy import IPlugin
from postprocessing import PostProcessedDataItem
__author__ = 'aj@springlab.co'
class ValidatedDataItem(PostProcessedDataItem):
"""
Overrides the PostProcessedDataItem class to provide an indication that a
PostProcessedDataItem instance has undergone validation
"""
def __init__(self, seq=None, **kwargs):
self.valid = True
super(ValidatedDataItem, self).__init__(seq, **kwargs)
class ValidationPluginInterface(IPlugin.IPlugin):
"""
Defines an interface for a plugin that validates the processed data items
"""
def can_validate(self, data_model_name, data_model):
"""
Determines whether the plugin can validate data associated with a given data model. Returns a bool.
"""
return False
def validate(self, data_items, data_model_name, data_model):
"""
For a given data model, processes a list of (UID value, ExtractedDataItem instance) tuples and validates
each ExtractedDataItem instance, transforming it into a ValidatedDataItem instance in the process.
Returns a list of (UID value, ValidatedDataItem instance) tuples.
"""
return []
|
Set default state of ValidatedDateItem to valid.
|
Set default state of ValidatedDateItem to valid.
|
Python
|
mit
|
slventures/jormungand
|
from yapsy import IPlugin
from postprocessing import PostProcessedDataItem
__author__ = 'aj@springlab.co'
class ValidatedDataItem(PostProcessedDataItem):
"""
Overrides the PostProcessedDataItem class to provide an indication that a
PostProcessedDataItem instance has undergone validation
"""
def __init__(self, seq=None, **kwargs):
self.valid = False
super(ValidatedDataItem, self).__init__(seq, **kwargs)
class ValidationPluginInterface(IPlugin.IPlugin):
"""
Defines an interface for a plugin that validates the processed data items
"""
def can_validate(self, data_model_name, data_model):
"""
Determines whether the plugin can validate data associated with a given data model. Returns a bool.
"""
return False
def validate(self, data_items, data_model_name, data_model):
"""
For a given data model, processes a list of (UID value, ExtractedDataItem instance) tuples and validates
each ExtractedDataItem instance, transforming it into a ValidatedDataItem instance in the process.
Returns a list of (UID value, ValidatedDataItem instance) tuples.
"""
return []
Set default state of ValidatedDateItem to valid.
|
from yapsy import IPlugin
from postprocessing import PostProcessedDataItem
__author__ = 'aj@springlab.co'
class ValidatedDataItem(PostProcessedDataItem):
"""
Overrides the PostProcessedDataItem class to provide an indication that a
PostProcessedDataItem instance has undergone validation
"""
def __init__(self, seq=None, **kwargs):
self.valid = True
super(ValidatedDataItem, self).__init__(seq, **kwargs)
class ValidationPluginInterface(IPlugin.IPlugin):
"""
Defines an interface for a plugin that validates the processed data items
"""
def can_validate(self, data_model_name, data_model):
"""
Determines whether the plugin can validate data associated with a given data model. Returns a bool.
"""
return False
def validate(self, data_items, data_model_name, data_model):
"""
For a given data model, processes a list of (UID value, ExtractedDataItem instance) tuples and validates
each ExtractedDataItem instance, transforming it into a ValidatedDataItem instance in the process.
Returns a list of (UID value, ValidatedDataItem instance) tuples.
"""
return []
|
<commit_before>from yapsy import IPlugin
from postprocessing import PostProcessedDataItem
__author__ = 'aj@springlab.co'
class ValidatedDataItem(PostProcessedDataItem):
"""
Overrides the PostProcessedDataItem class to provide an indication that a
PostProcessedDataItem instance has undergone validation
"""
def __init__(self, seq=None, **kwargs):
self.valid = False
super(ValidatedDataItem, self).__init__(seq, **kwargs)
class ValidationPluginInterface(IPlugin.IPlugin):
"""
Defines an interface for a plugin that validates the processed data items
"""
def can_validate(self, data_model_name, data_model):
"""
Determines whether the plugin can validate data associated with a given data model. Returns a bool.
"""
return False
def validate(self, data_items, data_model_name, data_model):
"""
For a given data model, processes a list of (UID value, ExtractedDataItem instance) tuples and validates
each ExtractedDataItem instance, transforming it into a ValidatedDataItem instance in the process.
Returns a list of (UID value, ValidatedDataItem instance) tuples.
"""
return []
<commit_msg>Set default state of ValidatedDateItem to valid.<commit_after>
|
from yapsy import IPlugin
from postprocessing import PostProcessedDataItem
__author__ = 'aj@springlab.co'
class ValidatedDataItem(PostProcessedDataItem):
"""
Overrides the PostProcessedDataItem class to provide an indication that a
PostProcessedDataItem instance has undergone validation
"""
def __init__(self, seq=None, **kwargs):
self.valid = True
super(ValidatedDataItem, self).__init__(seq, **kwargs)
class ValidationPluginInterface(IPlugin.IPlugin):
"""
Defines an interface for a plugin that validates the processed data items
"""
def can_validate(self, data_model_name, data_model):
"""
Determines whether the plugin can validate data associated with a given data model. Returns a bool.
"""
return False
def validate(self, data_items, data_model_name, data_model):
"""
For a given data model, processes a list of (UID value, ExtractedDataItem instance) tuples and validates
each ExtractedDataItem instance, transforming it into a ValidatedDataItem instance in the process.
Returns a list of (UID value, ValidatedDataItem instance) tuples.
"""
return []
|
from yapsy import IPlugin
from postprocessing import PostProcessedDataItem
__author__ = 'aj@springlab.co'
class ValidatedDataItem(PostProcessedDataItem):
"""
Overrides the PostProcessedDataItem class to provide an indication that a
PostProcessedDataItem instance has undergone validation
"""
def __init__(self, seq=None, **kwargs):
self.valid = False
super(ValidatedDataItem, self).__init__(seq, **kwargs)
class ValidationPluginInterface(IPlugin.IPlugin):
"""
Defines an interface for a plugin that validates the processed data items
"""
def can_validate(self, data_model_name, data_model):
"""
Determines whether the plugin can validate data associated with a given data model. Returns a bool.
"""
return False
def validate(self, data_items, data_model_name, data_model):
"""
For a given data model, processes a list of (UID value, ExtractedDataItem instance) tuples and validates
each ExtractedDataItem instance, transforming it into a ValidatedDataItem instance in the process.
Returns a list of (UID value, ValidatedDataItem instance) tuples.
"""
return []
Set default state of ValidatedDateItem to valid.from yapsy import IPlugin
from postprocessing import PostProcessedDataItem
__author__ = 'aj@springlab.co'
class ValidatedDataItem(PostProcessedDataItem):
"""
Overrides the PostProcessedDataItem class to provide an indication that a
PostProcessedDataItem instance has undergone validation
"""
def __init__(self, seq=None, **kwargs):
self.valid = True
super(ValidatedDataItem, self).__init__(seq, **kwargs)
class ValidationPluginInterface(IPlugin.IPlugin):
"""
Defines an interface for a plugin that validates the processed data items
"""
def can_validate(self, data_model_name, data_model):
"""
Determines whether the plugin can validate data associated with a given data model. Returns a bool.
"""
return False
def validate(self, data_items, data_model_name, data_model):
"""
For a given data model, processes a list of (UID value, ExtractedDataItem instance) tuples and validates
each ExtractedDataItem instance, transforming it into a ValidatedDataItem instance in the process.
Returns a list of (UID value, ValidatedDataItem instance) tuples.
"""
return []
|
<commit_before>from yapsy import IPlugin
from postprocessing import PostProcessedDataItem
__author__ = 'aj@springlab.co'
class ValidatedDataItem(PostProcessedDataItem):
"""
Overrides the PostProcessedDataItem class to provide an indication that a
PostProcessedDataItem instance has undergone validation
"""
def __init__(self, seq=None, **kwargs):
self.valid = False
super(ValidatedDataItem, self).__init__(seq, **kwargs)
class ValidationPluginInterface(IPlugin.IPlugin):
"""
Defines an interface for a plugin that validates the processed data items
"""
def can_validate(self, data_model_name, data_model):
"""
Determines whether the plugin can validate data associated with a given data model. Returns a bool.
"""
return False
def validate(self, data_items, data_model_name, data_model):
"""
For a given data model, processes a list of (UID value, ExtractedDataItem instance) tuples and validates
each ExtractedDataItem instance, transforming it into a ValidatedDataItem instance in the process.
Returns a list of (UID value, ValidatedDataItem instance) tuples.
"""
return []
<commit_msg>Set default state of ValidatedDateItem to valid.<commit_after>from yapsy import IPlugin
from postprocessing import PostProcessedDataItem
__author__ = 'aj@springlab.co'
class ValidatedDataItem(PostProcessedDataItem):
"""
Overrides the PostProcessedDataItem class to provide an indication that a
PostProcessedDataItem instance has undergone validation
"""
def __init__(self, seq=None, **kwargs):
self.valid = True
super(ValidatedDataItem, self).__init__(seq, **kwargs)
class ValidationPluginInterface(IPlugin.IPlugin):
"""
Defines an interface for a plugin that validates the processed data items
"""
def can_validate(self, data_model_name, data_model):
"""
Determines whether the plugin can validate data associated with a given data model. Returns a bool.
"""
return False
def validate(self, data_items, data_model_name, data_model):
"""
For a given data model, processes a list of (UID value, ExtractedDataItem instance) tuples and validates
each ExtractedDataItem instance, transforming it into a ValidatedDataItem instance in the process.
Returns a list of (UID value, ValidatedDataItem instance) tuples.
"""
return []
|
93550cafa9ea38b663166cd5b1ae1fcf0e42f1d0
|
ukechord.py
|
ukechord.py
|
#!/usr/bin/python2
"""Generate Ukulele song sheets with chords.
Input files are in ChordPro-ish format, output files in PDF format.
"""
import argparse
import sys
from reportlab.lib import pagesizes
import chordpro
import pdfwriter
def _parse_options(args):
"""Return (options, args)."""
parser = argparse.ArgumentParser(
usage="%(prog)s [-o OUTFILE] [INFILE]",
description=__doc__)
parser.add_argument("-o", "--output", dest="outfile",
nargs="?", default=sys.stdout,
type=argparse.FileType('wb'),
help="set output filename (default: stdout)")
parser.add_argument("infile", nargs="?", default=sys.stdin,
type=argparse.FileType('r'),
help="input filenames (default: stdin)")
return parser.parse_args(args)
def main(args):
args = _parse_options(args)
with args.outfile as outfile:
with args.infile as infile:
pdf_writer = pdfwriter.PdfWriter(outfile, pagesizes.A4)
chordpro.convert(infile, pdf_writer)
if __name__ == "__main__":
main(sys.argv[1:])
|
#!/usr/bin/python2
"""Generate Ukulele song sheets with chords.
Input files are in ChordPro-ish format, output files in PDF format.
"""
import argparse
import sys
from reportlab.lib import pagesizes
import chordpro
import pdfwriter
def _parse_options(args):
"""Return (options, args)."""
parser = argparse.ArgumentParser(
usage="%(prog)s [-o OUTFILE] [INFILE]",
description=__doc__)
parser.add_argument("-o", "--output", dest="outfile",
nargs="?", default=sys.stdout,
type=argparse.FileType('wb'),
help="set output filename (default: stdout)")
parser.add_argument("infile", nargs="?", default=sys.stdin,
type=argparse.FileType('r'),
help="input filenames (default: stdin)")
return parser.parse_args(args)
def main(args):
args = _parse_options(args)
with args.outfile as outfile:
if outfile == sys.stdout:
# TODO: This is a terrible hack to use sys.stdout in binary mode.
outfile = getattr(outfile, 'buffer', outfile)
with args.infile as infile:
pdf_writer = pdfwriter.PdfWriter(outfile, pagesizes.A4)
chordpro.convert(infile, pdf_writer)
if __name__ == "__main__":
main(sys.argv[1:])
|
Work around issue writing to sys.stdout in binary mode (Python3).
|
Work around issue writing to sys.stdout in binary mode (Python3).
For details, see:
https://mail.python.org/pipermail/stdlib-sig/2012-June/000953.html
|
Python
|
apache-2.0
|
gnoack/ukechord,gnoack/ukechord
|
#!/usr/bin/python2
"""Generate Ukulele song sheets with chords.
Input files are in ChordPro-ish format, output files in PDF format.
"""
import argparse
import sys
from reportlab.lib import pagesizes
import chordpro
import pdfwriter
def _parse_options(args):
"""Return (options, args)."""
parser = argparse.ArgumentParser(
usage="%(prog)s [-o OUTFILE] [INFILE]",
description=__doc__)
parser.add_argument("-o", "--output", dest="outfile",
nargs="?", default=sys.stdout,
type=argparse.FileType('wb'),
help="set output filename (default: stdout)")
parser.add_argument("infile", nargs="?", default=sys.stdin,
type=argparse.FileType('r'),
help="input filenames (default: stdin)")
return parser.parse_args(args)
def main(args):
args = _parse_options(args)
with args.outfile as outfile:
with args.infile as infile:
pdf_writer = pdfwriter.PdfWriter(outfile, pagesizes.A4)
chordpro.convert(infile, pdf_writer)
if __name__ == "__main__":
main(sys.argv[1:])
Work around issue writing to sys.stdout in binary mode (Python3).
For details, see:
https://mail.python.org/pipermail/stdlib-sig/2012-June/000953.html
|
#!/usr/bin/python2
"""Generate Ukulele song sheets with chords.
Input files are in ChordPro-ish format, output files in PDF format.
"""
import argparse
import sys
from reportlab.lib import pagesizes
import chordpro
import pdfwriter
def _parse_options(args):
"""Return (options, args)."""
parser = argparse.ArgumentParser(
usage="%(prog)s [-o OUTFILE] [INFILE]",
description=__doc__)
parser.add_argument("-o", "--output", dest="outfile",
nargs="?", default=sys.stdout,
type=argparse.FileType('wb'),
help="set output filename (default: stdout)")
parser.add_argument("infile", nargs="?", default=sys.stdin,
type=argparse.FileType('r'),
help="input filenames (default: stdin)")
return parser.parse_args(args)
def main(args):
args = _parse_options(args)
with args.outfile as outfile:
if outfile == sys.stdout:
# TODO: This is a terrible hack to use sys.stdout in binary mode.
outfile = getattr(outfile, 'buffer', outfile)
with args.infile as infile:
pdf_writer = pdfwriter.PdfWriter(outfile, pagesizes.A4)
chordpro.convert(infile, pdf_writer)
if __name__ == "__main__":
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/python2
"""Generate Ukulele song sheets with chords.
Input files are in ChordPro-ish format, output files in PDF format.
"""
import argparse
import sys
from reportlab.lib import pagesizes
import chordpro
import pdfwriter
def _parse_options(args):
"""Return (options, args)."""
parser = argparse.ArgumentParser(
usage="%(prog)s [-o OUTFILE] [INFILE]",
description=__doc__)
parser.add_argument("-o", "--output", dest="outfile",
nargs="?", default=sys.stdout,
type=argparse.FileType('wb'),
help="set output filename (default: stdout)")
parser.add_argument("infile", nargs="?", default=sys.stdin,
type=argparse.FileType('r'),
help="input filenames (default: stdin)")
return parser.parse_args(args)
def main(args):
args = _parse_options(args)
with args.outfile as outfile:
with args.infile as infile:
pdf_writer = pdfwriter.PdfWriter(outfile, pagesizes.A4)
chordpro.convert(infile, pdf_writer)
if __name__ == "__main__":
main(sys.argv[1:])
<commit_msg>Work around issue writing to sys.stdout in binary mode (Python3).
For details, see:
https://mail.python.org/pipermail/stdlib-sig/2012-June/000953.html<commit_after>
|
#!/usr/bin/python2
"""Generate Ukulele song sheets with chords.
Input files are in ChordPro-ish format, output files in PDF format.
"""
import argparse
import sys
from reportlab.lib import pagesizes
import chordpro
import pdfwriter
def _parse_options(args):
"""Return (options, args)."""
parser = argparse.ArgumentParser(
usage="%(prog)s [-o OUTFILE] [INFILE]",
description=__doc__)
parser.add_argument("-o", "--output", dest="outfile",
nargs="?", default=sys.stdout,
type=argparse.FileType('wb'),
help="set output filename (default: stdout)")
parser.add_argument("infile", nargs="?", default=sys.stdin,
type=argparse.FileType('r'),
help="input filenames (default: stdin)")
return parser.parse_args(args)
def main(args):
args = _parse_options(args)
with args.outfile as outfile:
if outfile == sys.stdout:
# TODO: This is a terrible hack to use sys.stdout in binary mode.
outfile = getattr(outfile, 'buffer', outfile)
with args.infile as infile:
pdf_writer = pdfwriter.PdfWriter(outfile, pagesizes.A4)
chordpro.convert(infile, pdf_writer)
if __name__ == "__main__":
main(sys.argv[1:])
|
#!/usr/bin/python2
"""Generate Ukulele song sheets with chords.
Input files are in ChordPro-ish format, output files in PDF format.
"""
import argparse
import sys
from reportlab.lib import pagesizes
import chordpro
import pdfwriter
def _parse_options(args):
"""Return (options, args)."""
parser = argparse.ArgumentParser(
usage="%(prog)s [-o OUTFILE] [INFILE]",
description=__doc__)
parser.add_argument("-o", "--output", dest="outfile",
nargs="?", default=sys.stdout,
type=argparse.FileType('wb'),
help="set output filename (default: stdout)")
parser.add_argument("infile", nargs="?", default=sys.stdin,
type=argparse.FileType('r'),
help="input filenames (default: stdin)")
return parser.parse_args(args)
def main(args):
args = _parse_options(args)
with args.outfile as outfile:
with args.infile as infile:
pdf_writer = pdfwriter.PdfWriter(outfile, pagesizes.A4)
chordpro.convert(infile, pdf_writer)
if __name__ == "__main__":
main(sys.argv[1:])
Work around issue writing to sys.stdout in binary mode (Python3).
For details, see:
https://mail.python.org/pipermail/stdlib-sig/2012-June/000953.html#!/usr/bin/python2
"""Generate Ukulele song sheets with chords.
Input files are in ChordPro-ish format, output files in PDF format.
"""
import argparse
import sys
from reportlab.lib import pagesizes
import chordpro
import pdfwriter
def _parse_options(args):
"""Return (options, args)."""
parser = argparse.ArgumentParser(
usage="%(prog)s [-o OUTFILE] [INFILE]",
description=__doc__)
parser.add_argument("-o", "--output", dest="outfile",
nargs="?", default=sys.stdout,
type=argparse.FileType('wb'),
help="set output filename (default: stdout)")
parser.add_argument("infile", nargs="?", default=sys.stdin,
type=argparse.FileType('r'),
help="input filenames (default: stdin)")
return parser.parse_args(args)
def main(args):
args = _parse_options(args)
with args.outfile as outfile:
if outfile == sys.stdout:
# TODO: This is a terrible hack to use sys.stdout in binary mode.
outfile = getattr(outfile, 'buffer', outfile)
with args.infile as infile:
pdf_writer = pdfwriter.PdfWriter(outfile, pagesizes.A4)
chordpro.convert(infile, pdf_writer)
if __name__ == "__main__":
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/python2
"""Generate Ukulele song sheets with chords.
Input files are in ChordPro-ish format, output files in PDF format.
"""
import argparse
import sys
from reportlab.lib import pagesizes
import chordpro
import pdfwriter
def _parse_options(args):
"""Return (options, args)."""
parser = argparse.ArgumentParser(
usage="%(prog)s [-o OUTFILE] [INFILE]",
description=__doc__)
parser.add_argument("-o", "--output", dest="outfile",
nargs="?", default=sys.stdout,
type=argparse.FileType('wb'),
help="set output filename (default: stdout)")
parser.add_argument("infile", nargs="?", default=sys.stdin,
type=argparse.FileType('r'),
help="input filenames (default: stdin)")
return parser.parse_args(args)
def main(args):
args = _parse_options(args)
with args.outfile as outfile:
with args.infile as infile:
pdf_writer = pdfwriter.PdfWriter(outfile, pagesizes.A4)
chordpro.convert(infile, pdf_writer)
if __name__ == "__main__":
main(sys.argv[1:])
<commit_msg>Work around issue writing to sys.stdout in binary mode (Python3).
For details, see:
https://mail.python.org/pipermail/stdlib-sig/2012-June/000953.html<commit_after>#!/usr/bin/python2
"""Generate Ukulele song sheets with chords.
Input files are in ChordPro-ish format, output files in PDF format.
"""
import argparse
import sys
from reportlab.lib import pagesizes
import chordpro
import pdfwriter
def _parse_options(args):
"""Return (options, args)."""
parser = argparse.ArgumentParser(
usage="%(prog)s [-o OUTFILE] [INFILE]",
description=__doc__)
parser.add_argument("-o", "--output", dest="outfile",
nargs="?", default=sys.stdout,
type=argparse.FileType('wb'),
help="set output filename (default: stdout)")
parser.add_argument("infile", nargs="?", default=sys.stdin,
type=argparse.FileType('r'),
help="input filenames (default: stdin)")
return parser.parse_args(args)
def main(args):
args = _parse_options(args)
with args.outfile as outfile:
if outfile == sys.stdout:
# TODO: This is a terrible hack to use sys.stdout in binary mode.
outfile = getattr(outfile, 'buffer', outfile)
with args.infile as infile:
pdf_writer = pdfwriter.PdfWriter(outfile, pagesizes.A4)
chordpro.convert(infile, pdf_writer)
if __name__ == "__main__":
main(sys.argv[1:])
|
dc981dbd1b29d9586453f325f99b1c413c494800
|
account/managers.py
|
account/managers.py
|
from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address = self.create(user=user, email=email, **kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
|
from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address, __ = self.get_or_create(user=user, email=email, default=kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
|
Use get_or_create instead of just create
|
Use get_or_create instead of just create
|
Python
|
mit
|
gem/geonode-user-accounts,gem/geonode-user-accounts,gem/geonode-user-accounts
|
from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address = self.create(user=user, email=email, **kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
Use get_or_create instead of just create
|
from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address, __ = self.get_or_create(user=user, email=email, default=kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
|
<commit_before>from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address = self.create(user=user, email=email, **kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
<commit_msg>Use get_or_create instead of just create<commit_after>
|
from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address, __ = self.get_or_create(user=user, email=email, default=kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
|
from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address = self.create(user=user, email=email, **kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
Use get_or_create instead of just createfrom __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address, __ = self.get_or_create(user=user, email=email, default=kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
|
<commit_before>from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address = self.create(user=user, email=email, **kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
<commit_msg>Use get_or_create instead of just create<commit_after>from __future__ import unicode_literals
from django.db import models
class EmailAddressManager(models.Manager):
def add_email(self, user, email, **kwargs):
confirm = kwargs.pop("confirm", False)
email_address, __ = self.get_or_create(user=user, email=email, default=kwargs)
if confirm and not email_address.verified:
email_address.send_confirmation()
return email_address
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except self.model.DoesNotExist:
return None
def get_users_for(self, email):
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
|
80df385acb9f39d0a5f01dc41954b7035ecafb2d
|
upnp_inspector/__init__.py
|
upnp_inspector/__init__.py
|
# -*- coding: utf-8 -*-
__version_info__ = (0, 2, 3)
__version__ = '%d.%d.%d' % __version_info__[:3]
|
# -*- coding: utf-8 -*-
__version__ = "0.3.dev0"
|
Switch to PEP 440 compliant version string and update to 0.3.dev0.
|
Switch to PEP 440 compliant version string and update to 0.3.dev0.
Update to 0.3.dev0 since this is the version already stated in the
NEWS file.
|
Python
|
mit
|
coherence-project/UPnP-Inspector
|
# -*- coding: utf-8 -*-
__version_info__ = (0, 2, 3)
__version__ = '%d.%d.%d' % __version_info__[:3]
Switch to PEP 440 compliant version string and update to 0.3.dev0.
Update to 0.3.dev0 since this is the version already stated in the
NEWS file.
|
# -*- coding: utf-8 -*-
__version__ = "0.3.dev0"
|
<commit_before># -*- coding: utf-8 -*-
__version_info__ = (0, 2, 3)
__version__ = '%d.%d.%d' % __version_info__[:3]
<commit_msg>Switch to PEP 440 compliant version string and update to 0.3.dev0.
Update to 0.3.dev0 since this is the version already stated in the
NEWS file.<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = "0.3.dev0"
|
# -*- coding: utf-8 -*-
__version_info__ = (0, 2, 3)
__version__ = '%d.%d.%d' % __version_info__[:3]
Switch to PEP 440 compliant version string and update to 0.3.dev0.
Update to 0.3.dev0 since this is the version already stated in the
NEWS file.# -*- coding: utf-8 -*-
__version__ = "0.3.dev0"
|
<commit_before># -*- coding: utf-8 -*-
__version_info__ = (0, 2, 3)
__version__ = '%d.%d.%d' % __version_info__[:3]
<commit_msg>Switch to PEP 440 compliant version string and update to 0.3.dev0.
Update to 0.3.dev0 since this is the version already stated in the
NEWS file.<commit_after># -*- coding: utf-8 -*-
__version__ = "0.3.dev0"
|
16c1352ecf8583615e482c431ec5183fdb718f67
|
split_file.py
|
split_file.py
|
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use."""
return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))
|
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def merge_quotations(statements):
"""If there are an odd number of "s in a statement, assume that we
broke the middle of a string. We recombine that string."""
cur = None
for i in statements:
if i.count('"') % 2 != 0:
if cur is None:
cur = i
else:
yield (cur + ' ' + i)
cur = None
elif cur is None:
yield i
else:
cur += ' ' + i
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use.
We additionally merge lines inside of quotations."""
return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
|
Make splitting more robust to periods in strings
|
Make splitting more robust to periods in strings
|
Python
|
mit
|
JasonGross/coq-tools,JasonGross/coq-tools
|
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use."""
return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))
Make splitting more robust to periods in strings
|
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def merge_quotations(statements):
"""If there are an odd number of "s in a statement, assume that we
broke the middle of a string. We recombine that string."""
cur = None
for i in statements:
if i.count('"') % 2 != 0:
if cur is None:
cur = i
else:
yield (cur + ' ' + i)
cur = None
elif cur is None:
yield i
else:
cur += ' ' + i
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use.
We additionally merge lines inside of quotations."""
return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
|
<commit_before>from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use."""
return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))
<commit_msg>Make splitting more robust to periods in strings<commit_after>
|
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def merge_quotations(statements):
"""If there are an odd number of "s in a statement, assume that we
broke the middle of a string. We recombine that string."""
cur = None
for i in statements:
if i.count('"') % 2 != 0:
if cur is None:
cur = i
else:
yield (cur + ' ' + i)
cur = None
elif cur is None:
yield i
else:
cur += ' ' + i
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use.
We additionally merge lines inside of quotations."""
return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
|
from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use."""
return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))
Make splitting more robust to periods in stringsfrom strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def merge_quotations(statements):
"""If there are an odd number of "s in a statement, assume that we
broke the middle of a string. We recombine that string."""
cur = None
for i in statements:
if i.count('"') % 2 != 0:
if cur is None:
cur = i
else:
yield (cur + ' ' + i)
cur = None
elif cur is None:
yield i
else:
cur += ' ' + i
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use.
We additionally merge lines inside of quotations."""
return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
|
<commit_before>from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use."""
return re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))
<commit_msg>Make splitting more robust to periods in strings<commit_after>from strip_comments import strip_comments
import re
__all__ = ["split_coq_file_contents"]
def merge_quotations(statements):
"""If there are an odd number of "s in a statement, assume that we
broke the middle of a string. We recombine that string."""
cur = None
for i in statements:
if i.count('"') % 2 != 0:
if cur is None:
cur = i
else:
yield (cur + ' ' + i)
cur = None
elif cur is None:
yield i
else:
cur += ' ' + i
def split_coq_file_contents(contents):
"""Splits the contents of a coq file into multiple statements.
This is done by finding one or three periods followed by
whitespace. This is a dumb algorithm, but it seems to be (nearly)
the one that ProofGeneral and CoqIDE use.
We additionally merge lines inside of quotations."""
return list(merge_quotations(re.split('(?<=[^\.]\.\.\.)\s|(?<=[^\.]\.)\s', strip_comments(contents))))
|
5f49fb8c7c0f9e7a05d4f9b730d7f3e872229d60
|
test/completion/definition.py
|
test/completion/definition.py
|
"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
|
"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
# Note: len('isinstance(') == 11
#? 11 isinstance
isinstance()
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None,)
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None, )
# Note: len('isinstance(None, ') == 17
##? 17 isinstance
isinstance(None, )
# Note: len('isinstance( ') == 12
##? 12 isinstance
isinstance( )
|
Add blackbox tests using column number
|
Add blackbox tests using column number
|
Python
|
mit
|
flurischt/jedi,WoLpH/jedi,mfussenegger/jedi,dwillmer/jedi,tjwei/jedi,jonashaag/jedi,jonashaag/jedi,tjwei/jedi,flurischt/jedi,dwillmer/jedi,mfussenegger/jedi,WoLpH/jedi
|
"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
Add blackbox tests using column number
|
"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
# Note: len('isinstance(') == 11
#? 11 isinstance
isinstance()
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None,)
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None, )
# Note: len('isinstance(None, ') == 17
##? 17 isinstance
isinstance(None, )
# Note: len('isinstance( ') == 12
##? 12 isinstance
isinstance( )
|
<commit_before>"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
<commit_msg>Add blackbox tests using column number<commit_after>
|
"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
# Note: len('isinstance(') == 11
#? 11 isinstance
isinstance()
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None,)
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None, )
# Note: len('isinstance(None, ') == 17
##? 17 isinstance
isinstance(None, )
# Note: len('isinstance( ') == 12
##? 12 isinstance
isinstance( )
|
"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
Add blackbox tests using column number"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
# Note: len('isinstance(') == 11
#? 11 isinstance
isinstance()
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None,)
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None, )
# Note: len('isinstance(None, ') == 17
##? 17 isinstance
isinstance(None, )
# Note: len('isinstance( ') == 12
##? 12 isinstance
isinstance( )
|
<commit_before>"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
<commit_msg>Add blackbox tests using column number<commit_after>"""
Fallback to callee definition when definition not found.
- https://github.com/davidhalter/jedi/issues/131
- https://github.com/davidhalter/jedi/pull/149
"""
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
# Note: len('isinstance(') == 11
#? 11 isinstance
isinstance()
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None,)
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None, )
# Note: len('isinstance(None, ') == 17
##? 17 isinstance
isinstance(None, )
# Note: len('isinstance( ') == 12
##? 12 isinstance
isinstance( )
|
192e24cdafff2bb780ef9cc87853c48e9e41cb4a
|
stationspinner/accounting/management/commands/characters.py
|
stationspinner/accounting/management/commands/characters.py
|
from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('CharacterID\t\t {0} APIKey\t\t {1}'.format(char.pk, char.owner_key.pk))
|
from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('{0}\t\tCharacterID\t\t {1} APIKey\t\t {2}'.format(char.name,
char.pk,
char.owner_key.pk))
|
Simplify the output for copypaste
|
Simplify the output for copypaste
|
Python
|
agpl-3.0
|
kriberg/stationspinner,kriberg/stationspinner
|
from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('CharacterID\t\t {0} APIKey\t\t {1}'.format(char.pk, char.owner_key.pk))Simplify the output for copypaste
|
from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('{0}\t\tCharacterID\t\t {1} APIKey\t\t {2}'.format(char.name,
char.pk,
char.owner_key.pk))
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('CharacterID\t\t {0} APIKey\t\t {1}'.format(char.pk, char.owner_key.pk))<commit_msg>Simplify the output for copypaste<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('{0}\t\tCharacterID\t\t {1} APIKey\t\t {2}'.format(char.name,
char.pk,
char.owner_key.pk))
|
from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('CharacterID\t\t {0} APIKey\t\t {1}'.format(char.pk, char.owner_key.pk))Simplify the output for copypastefrom django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('{0}\t\tCharacterID\t\t {1} APIKey\t\t {2}'.format(char.name,
char.pk,
char.owner_key.pk))
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('CharacterID\t\t {0} APIKey\t\t {1}'.format(char.pk, char.owner_key.pk))<commit_msg>Simplify the output for copypaste<commit_after>from django.core.management.base import BaseCommand, CommandError
from stationspinner.character.models import CharacterSheet
class Command(BaseCommand):
help = 'Lists all enabled characterIDs with their APIKey PKs. Handy for sending tasks'
def handle(self, *args, **options):
characters = CharacterSheet.objects.filter(enabled=True)
for char in characters:
self.stdout.write('{0}\t\tCharacterID\t\t {1} APIKey\t\t {2}'.format(char.name,
char.pk,
char.owner_key.pk))
|
e0de6546fb58af113d18cf7e836407e3f8a1a985
|
contrib/bosco/bosco-cluster-remote-hosts.py
|
contrib/bosco/bosco-cluster-remote-hosts.py
|
#!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
import htcondor
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
|
#!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
|
Delete unused import htcondor (SOFTWARE-4687)
|
Delete unused import htcondor (SOFTWARE-4687)
|
Python
|
apache-2.0
|
brianhlin/htcondor-ce,matyasselmeci/htcondor-ce,matyasselmeci/htcondor-ce,brianhlin/htcondor-ce,matyasselmeci/htcondor-ce,brianhlin/htcondor-ce
|
#!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
import htcondor
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
Delete unused import htcondor (SOFTWARE-4687)
|
#!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
|
<commit_before>#!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
import htcondor
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
<commit_msg>Delete unused import htcondor (SOFTWARE-4687)<commit_after>
|
#!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
|
#!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
import htcondor
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
Delete unused import htcondor (SOFTWARE-4687)#!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
|
<commit_before>#!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
import htcondor
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
<commit_msg>Delete unused import htcondor (SOFTWARE-4687)<commit_after>#!/usr/bin/python3
import os
import subprocess
import sys
try:
import classad
except ImportError:
sys.exit("ERROR: Could not load HTCondor Python bindings. "
"Ensure the 'htcondor' and 'classad' are in PYTHONPATH")
jre = classad.parseAds('JOB_ROUTER_ENTRIES')
grs = ( x["GridResource"] for x in jre )
rhosts = ( x.split()[1:3] for x in grs )
for batchtype, rhost in rhosts:
subprocess.call(['bosco_cluster', '-o', os.getenv("OVERRIDE_DIR"),
rhost, batchtype])
|
ded6f27721e54f2c7dab3016209927678d85b90d
|
aldryn_faq/forms.py
|
aldryn_faq/forms.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
def clean_slug(self):
slug = self.cleaned_data['slug']
translations_model = Category._meta.translations_model
categories_with_slug = translations_model.objects.filter(slug=slug)
if self.instance.pk:
# Make sure to exclude references from this master :)
categories_with_slug = categories_with_slug.exclude(
master_id=self.instance.pk)
if categories_with_slug.exists():
raise forms.ValidationError(
'A category with this slug already exists.')
return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
# def clean_slug(self):
# slug = self.cleaned_data['slug']
# translations_model = Category._meta.translations_model
# categories_with_slug = translations_model.objects.filter(slug=slug)
# if self.instance.pk:
# # Make sure to exclude references from this master :)
# categories_with_slug = categories_with_slug.exclude(
# master_id=self.instance.pk)
# if categories_with_slug.exists():
# raise forms.ValidationError(
# 'A category with this slug already exists.')
# return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
|
Remove no longer used code
|
Remove no longer used code
|
Python
|
bsd-3-clause
|
czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq,czpython/aldryn-faq
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
def clean_slug(self):
slug = self.cleaned_data['slug']
translations_model = Category._meta.translations_model
categories_with_slug = translations_model.objects.filter(slug=slug)
if self.instance.pk:
# Make sure to exclude references from this master :)
categories_with_slug = categories_with_slug.exclude(
master_id=self.instance.pk)
if categories_with_slug.exists():
raise forms.ValidationError(
'A category with this slug already exists.')
return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
Remove no longer used code
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
# def clean_slug(self):
# slug = self.cleaned_data['slug']
# translations_model = Category._meta.translations_model
# categories_with_slug = translations_model.objects.filter(slug=slug)
# if self.instance.pk:
# # Make sure to exclude references from this master :)
# categories_with_slug = categories_with_slug.exclude(
# master_id=self.instance.pk)
# if categories_with_slug.exists():
# raise forms.ValidationError(
# 'A category with this slug already exists.')
# return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
def clean_slug(self):
slug = self.cleaned_data['slug']
translations_model = Category._meta.translations_model
categories_with_slug = translations_model.objects.filter(slug=slug)
if self.instance.pk:
# Make sure to exclude references from this master :)
categories_with_slug = categories_with_slug.exclude(
master_id=self.instance.pk)
if categories_with_slug.exists():
raise forms.ValidationError(
'A category with this slug already exists.')
return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
<commit_msg>Remove no longer used code<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
# def clean_slug(self):
# slug = self.cleaned_data['slug']
# translations_model = Category._meta.translations_model
# categories_with_slug = translations_model.objects.filter(slug=slug)
# if self.instance.pk:
# # Make sure to exclude references from this master :)
# categories_with_slug = categories_with_slug.exclude(
# master_id=self.instance.pk)
# if categories_with_slug.exists():
# raise forms.ValidationError(
# 'A category with this slug already exists.')
# return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
def clean_slug(self):
slug = self.cleaned_data['slug']
translations_model = Category._meta.translations_model
categories_with_slug = translations_model.objects.filter(slug=slug)
if self.instance.pk:
# Make sure to exclude references from this master :)
categories_with_slug = categories_with_slug.exclude(
master_id=self.instance.pk)
if categories_with_slug.exists():
raise forms.ValidationError(
'A category with this slug already exists.')
return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
Remove no longer used code# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
# def clean_slug(self):
# slug = self.cleaned_data['slug']
# translations_model = Category._meta.translations_model
# categories_with_slug = translations_model.objects.filter(slug=slug)
# if self.instance.pk:
# # Make sure to exclude references from this master :)
# categories_with_slug = categories_with_slug.exclude(
# master_id=self.instance.pk)
# if categories_with_slug.exists():
# raise forms.ValidationError(
# 'A category with this slug already exists.')
# return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
def clean_slug(self):
slug = self.cleaned_data['slug']
translations_model = Category._meta.translations_model
categories_with_slug = translations_model.objects.filter(slug=slug)
if self.instance.pk:
# Make sure to exclude references from this master :)
categories_with_slug = categories_with_slug.exclude(
master_id=self.instance.pk)
if categories_with_slug.exists():
raise forms.ValidationError(
'A category with this slug already exists.')
return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
<commit_msg>Remove no longer used code<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import forms
from parler.forms import TranslatableModelForm
from sortedm2m.forms import SortedMultipleChoiceField
from .models import Category, QuestionListPlugin, Question
class CategoryAdminForm(TranslatableModelForm):
class Meta:
model = Category
# def clean_slug(self):
# slug = self.cleaned_data['slug']
# translations_model = Category._meta.translations_model
# categories_with_slug = translations_model.objects.filter(slug=slug)
# if self.instance.pk:
# # Make sure to exclude references from this master :)
# categories_with_slug = categories_with_slug.exclude(
# master_id=self.instance.pk)
# if categories_with_slug.exists():
# raise forms.ValidationError(
# 'A category with this slug already exists.')
# return slug
class QuestionListPluginForm(forms.ModelForm):
questions = SortedMultipleChoiceField(queryset=Question.objects.none())
class Meta:
model = QuestionListPlugin
def __init__(self, *args, **kwargs):
super(QuestionListPluginForm, self).__init__(*args, **kwargs)
questions_field = self.fields['questions']
questions_field.queryset = Question.objects.language()
|
db134f36ae54ef135037d0c912068fc678df54cf
|
examples/controllers.py
|
examples/controllers.py
|
#!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1' )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
|
#!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1', port=6633 )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
|
Allow RemoteController to connect to correct port.
|
Allow RemoteController to connect to correct port.
Fixes #584
|
Python
|
bsd-3-clause
|
mininet/mininet,mininet/mininet,mininet/mininet
|
#!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1' )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
Allow RemoteController to connect to correct port.
Fixes #584
|
#!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1', port=6633 )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
|
<commit_before>#!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1' )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
<commit_msg>Allow RemoteController to connect to correct port.
Fixes #584<commit_after>
|
#!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1', port=6633 )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
|
#!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1' )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
Allow RemoteController to connect to correct port.
Fixes #584#!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1', port=6633 )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
|
<commit_before>#!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1' )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
<commit_msg>Allow RemoteController to connect to correct port.
Fixes #584<commit_after>#!/usr/bin/python
"""
Create a network where different switches are connected to
different controllers, by creating a custom Switch() subclass.
"""
from mininet.net import Mininet
from mininet.node import OVSSwitch, Controller, RemoteController
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.cli import CLI
setLogLevel( 'info' )
# Two local and one "external" controller (which is actually c0)
# Ignore the warning message that the remote isn't (yet) running
c0 = Controller( 'c0', port=6633 )
c1 = Controller( 'c1', port=6634 )
c2 = RemoteController( 'c2', ip='127.0.0.1', port=6633 )
cmap = { 's1': c0, 's2': c1, 's3': c2 }
class MultiSwitch( OVSSwitch ):
"Custom Switch() subclass that connects to different controllers"
def start( self, controllers ):
return OVSSwitch.start( self, [ cmap[ self.name ] ] )
topo = TreeTopo( depth=2, fanout=2 )
net = Mininet( topo=topo, switch=MultiSwitch, build=False )
for c in [ c0, c1 ]:
net.addController(c)
net.build()
net.start()
CLI( net )
net.stop()
|
d9db4735a1c879e967af5fff30c8322ea3f5121a
|
hackfmi/urls.py
|
hackfmi/urls.py
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^search/(?P<name>\w+)/$', 'members.views.search', name='search'),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
|
Add url for searching user by name
|
Add url for searching user by name
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
Add url for searching user by name
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^search/(?P<name>\w+)/$', 'members.views.search', name='search'),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
<commit_msg>Add url for searching user by name<commit_after>
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^search/(?P<name>\w+)/$', 'members.views.search', name='search'),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
|
from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
Add url for searching user by namefrom django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^search/(?P<name>\w+)/$', 'members.views.search', name='search'),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
|
<commit_before>from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
<commit_msg>Add url for searching user by name<commit_after>from django.conf.urls import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from members import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', views.homepage, name='homepage'),
# Examples:
# url(r'^$', 'hackfmi.views.home', name='home'),
# url(r'^hackfmi/', include('hackfmi.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^search/(?P<name>\w+)/$', 'members.views.search', name='search'),
url(r'^protocols/add/$', 'protocols.views.add', name='add-protocol'),
url(r'^projects/add/$', 'projects.views.add_project', name='add-project'),
url(r'^reports/add/$', 'reports.views.add_report', name='add-report'),
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', {
'document_root': settings.MEDIA_ROOT,}),)
|
9dab9c08c57ab0548beaa32765a2f064d2ec6544
|
tests/app/test_application.py
|
tests/app/test_application.py
|
"""
Tests for the application infrastructure
"""
from flask import json
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
|
"""
Tests for the application infrastructure
"""
import mock
import pytest
from flask import json
from elasticsearch.exceptions import ConnectionError
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
@mock.patch('elasticsearch.transport.Urllib3HttpConnection.perform_request', side_effect=ConnectionError(500))
def test_elastic_search_client_performs_retries_on_connection_error(self, perform_request):
with pytest.raises(ConnectionError):
self.client.get('/')
# FlaskElasticsearch attaches the es client to the context in flask_elasticsearch.py
from flask import _app_ctx_stack
assert perform_request.call_count == 1 + _app_ctx_stack.top.elasticsearch.transport.max_retries
assert perform_request.call_count == 1 + 3
|
Add a test to indicate/ ensure that flask is performing retries …
|
Add a test to indicate/ ensure that flask is performing retries …
The retry functionality is buried in the elasticsearch.transport.Transport class and
can be effected by passing max_retries to the elasticsearch.client.ElasticSearch object
(https://github.com/elastic/elasticsearch-py/blob/master/elasticsearch/client/__init__.py#L184)
or the flask_elasticsearch.FlaskElasticsearch.init_app method. This test is an attempt
to surface this behaviour.
|
Python
|
mit
|
alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api
|
"""
Tests for the application infrastructure
"""
from flask import json
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
Add a test to indicate/ ensure that flask is performing retries …
The retry functionality is buried in the elasticsearch.transport.Transport class and
can be effected by passing max_retries to the elasticsearch.client.ElasticSearch object
(https://github.com/elastic/elasticsearch-py/blob/master/elasticsearch/client/__init__.py#L184)
or the flask_elasticsearch.FlaskElasticsearch.init_app method. This test is an attempt
to surface this behaviour.
|
"""
Tests for the application infrastructure
"""
import mock
import pytest
from flask import json
from elasticsearch.exceptions import ConnectionError
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
@mock.patch('elasticsearch.transport.Urllib3HttpConnection.perform_request', side_effect=ConnectionError(500))
def test_elastic_search_client_performs_retries_on_connection_error(self, perform_request):
with pytest.raises(ConnectionError):
self.client.get('/')
# FlaskElasticsearch attaches the es client to the context in flask_elasticsearch.py
from flask import _app_ctx_stack
assert perform_request.call_count == 1 + _app_ctx_stack.top.elasticsearch.transport.max_retries
assert perform_request.call_count == 1 + 3
|
<commit_before>"""
Tests for the application infrastructure
"""
from flask import json
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
<commit_msg>Add a test to indicate/ ensure that flask is performing retries …
The retry functionality is buried in the elasticsearch.transport.Transport class and
can be effected by passing max_retries to the elasticsearch.client.ElasticSearch object
(https://github.com/elastic/elasticsearch-py/blob/master/elasticsearch/client/__init__.py#L184)
or the flask_elasticsearch.FlaskElasticsearch.init_app method. This test is an attempt
to surface this behaviour.<commit_after>
|
"""
Tests for the application infrastructure
"""
import mock
import pytest
from flask import json
from elasticsearch.exceptions import ConnectionError
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
@mock.patch('elasticsearch.transport.Urllib3HttpConnection.perform_request', side_effect=ConnectionError(500))
def test_elastic_search_client_performs_retries_on_connection_error(self, perform_request):
with pytest.raises(ConnectionError):
self.client.get('/')
# FlaskElasticsearch attaches the es client to the context in flask_elasticsearch.py
from flask import _app_ctx_stack
assert perform_request.call_count == 1 + _app_ctx_stack.top.elasticsearch.transport.max_retries
assert perform_request.call_count == 1 + 3
|
"""
Tests for the application infrastructure
"""
from flask import json
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
Add a test to indicate/ ensure that flask is performing retries …
The retry functionality is buried in the elasticsearch.transport.Transport class and
can be effected by passing max_retries to the elasticsearch.client.ElasticSearch object
(https://github.com/elastic/elasticsearch-py/blob/master/elasticsearch/client/__init__.py#L184)
or the flask_elasticsearch.FlaskElasticsearch.init_app method. This test is an attempt
to surface this behaviour."""
Tests for the application infrastructure
"""
import mock
import pytest
from flask import json
from elasticsearch.exceptions import ConnectionError
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
@mock.patch('elasticsearch.transport.Urllib3HttpConnection.perform_request', side_effect=ConnectionError(500))
def test_elastic_search_client_performs_retries_on_connection_error(self, perform_request):
with pytest.raises(ConnectionError):
self.client.get('/')
# FlaskElasticsearch attaches the es client to the context in flask_elasticsearch.py
from flask import _app_ctx_stack
assert perform_request.call_count == 1 + _app_ctx_stack.top.elasticsearch.transport.max_retries
assert perform_request.call_count == 1 + 3
|
<commit_before>"""
Tests for the application infrastructure
"""
from flask import json
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
<commit_msg>Add a test to indicate/ ensure that flask is performing retries …
The retry functionality is buried in the elasticsearch.transport.Transport class and
can be effected by passing max_retries to the elasticsearch.client.ElasticSearch object
(https://github.com/elastic/elasticsearch-py/blob/master/elasticsearch/client/__init__.py#L184)
or the flask_elasticsearch.FlaskElasticsearch.init_app method. This test is an attempt
to surface this behaviour.<commit_after>"""
Tests for the application infrastructure
"""
import mock
import pytest
from flask import json
from elasticsearch.exceptions import ConnectionError
from nose.tools import assert_equal
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
assert 'links' in json.loads(response.get_data())
def test_404(self):
response = self.client.get('/index/type/search')
assert 404 == response.status_code
def test_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get('/')
assert 401 == response.status_code
assert 'WWW-Authenticate' in response.headers
def test_invalid_bearer_token_is_required(self):
self.do_not_provide_access_token()
response = self.client.get(
'/',
headers={'Authorization': 'Bearer invalid-token'})
assert 403 == response.status_code
def test_ttl_is_not_set(self):
response = self.client.get('/')
assert_equal(None, response.cache_control.max_age)
@mock.patch('elasticsearch.transport.Urllib3HttpConnection.perform_request', side_effect=ConnectionError(500))
def test_elastic_search_client_performs_retries_on_connection_error(self, perform_request):
with pytest.raises(ConnectionError):
self.client.get('/')
# FlaskElasticsearch attaches the es client to the context in flask_elasticsearch.py
from flask import _app_ctx_stack
assert perform_request.call_count == 1 + _app_ctx_stack.top.elasticsearch.transport.max_retries
assert perform_request.call_count == 1 + 3
|
a2b4b732c15c3cfefb345354bca8fc6de47d4820
|
appengine_config.py
|
appengine_config.py
|
"""`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')
|
"""`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')
import os
# Called only if the current namespace is not set.
def namespace_manager_default_namespace_for_request():
# The returned string will be used as the Google Apps domain.
applicationVersion="Default"
if "CURRENT_VERSION_ID" in os.environ:
applicationVersion = os.environ["CURRENT_VERSION_ID"].split('.')[0]
return applicationVersion
|
Enable NDB Shared memory namespace partioning using engine Version ID
|
Enable NDB Shared memory namespace partioning using engine Version ID
|
Python
|
apache-2.0
|
dbs/schemaorg,vholland/schemaorg,schemaorg/schemaorg,vholland/schemaorg,tfrancart/schemaorg,schemaorg/schemaorg,unor/schemaorg,schemaorg/schemaorg,dbs/schemaorg,vholland/schemaorg,dbs/schemaorg,tfrancart/schemaorg,tfrancart/schemaorg,vholland/schemaorg,schemaorg/schemaorg,dbs/schemaorg,tfrancart/schemaorg,unor/schemaorg,schemaorg/schemaorg,unor/schemaorg
|
"""`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')Enable NDB Shared memory namespace partioning using engine Version ID
|
"""`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')
import os
# Called only if the current namespace is not set.
def namespace_manager_default_namespace_for_request():
# The returned string will be used as the Google Apps domain.
applicationVersion="Default"
if "CURRENT_VERSION_ID" in os.environ:
applicationVersion = os.environ["CURRENT_VERSION_ID"].split('.')[0]
return applicationVersion
|
<commit_before>"""`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')<commit_msg>Enable NDB Shared memory namespace partioning using engine Version ID<commit_after>
|
"""`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')
import os
# Called only if the current namespace is not set.
def namespace_manager_default_namespace_for_request():
# The returned string will be used as the Google Apps domain.
applicationVersion="Default"
if "CURRENT_VERSION_ID" in os.environ:
applicationVersion = os.environ["CURRENT_VERSION_ID"].split('.')[0]
return applicationVersion
|
"""`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')Enable NDB Shared memory namespace partioning using engine Version ID"""`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')
import os
# Called only if the current namespace is not set.
def namespace_manager_default_namespace_for_request():
# The returned string will be used as the Google Apps domain.
applicationVersion="Default"
if "CURRENT_VERSION_ID" in os.environ:
applicationVersion = os.environ["CURRENT_VERSION_ID"].split('.')[0]
return applicationVersion
|
<commit_before>"""`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')<commit_msg>Enable NDB Shared memory namespace partioning using engine Version ID<commit_after>"""`appengine_config` gets loaded when starting a new application instance."""
import vendor
# insert `lib` as a site directory so our `main` module can load
# third-party libraries, and override built-ins with newer
# versions.
vendor.add('lib')
import os
# Called only if the current namespace is not set.
def namespace_manager_default_namespace_for_request():
# The returned string will be used as the Google Apps domain.
applicationVersion="Default"
if "CURRENT_VERSION_ID" in os.environ:
applicationVersion = os.environ["CURRENT_VERSION_ID"].split('.')[0]
return applicationVersion
|
ed6146566d57105af88855c6b8668b4f76e98dbf
|
xmanager/xm_local/__init__.py
|
xmanager/xm_local/__init__.py
|
# Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import Caip
from xmanager.xm_local.executors import Kubernetes
from xmanager.xm_local.executors import Local
from xmanager.xm_local.executors import TensorboardCapability
create_experiment = experiment.create_experiment
|
# Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import *
create_experiment = experiment.create_experiment
|
Make `DockerOptions` part of the `xm_local` module
|
Make `DockerOptions` part of the `xm_local` module
PiperOrigin-RevId: 376139511
Change-Id: Ia0ec1337b9ef2c175dea6b0c45e0a99b285d2b31
GitOrigin-RevId: 799d3ef6a98a6e4922b0b60c190c0d82cd538548
|
Python
|
apache-2.0
|
deepmind/xmanager,deepmind/xmanager
|
# Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import Caip
from xmanager.xm_local.executors import Kubernetes
from xmanager.xm_local.executors import Local
from xmanager.xm_local.executors import TensorboardCapability
create_experiment = experiment.create_experiment
Make `DockerOptions` part of the `xm_local` module
PiperOrigin-RevId: 376139511
Change-Id: Ia0ec1337b9ef2c175dea6b0c45e0a99b285d2b31
GitOrigin-RevId: 799d3ef6a98a6e4922b0b60c190c0d82cd538548
|
# Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import *
create_experiment = experiment.create_experiment
|
<commit_before># Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import Caip
from xmanager.xm_local.executors import Kubernetes
from xmanager.xm_local.executors import Local
from xmanager.xm_local.executors import TensorboardCapability
create_experiment = experiment.create_experiment
<commit_msg>Make `DockerOptions` part of the `xm_local` module
PiperOrigin-RevId: 376139511
Change-Id: Ia0ec1337b9ef2c175dea6b0c45e0a99b285d2b31
GitOrigin-RevId: 799d3ef6a98a6e4922b0b60c190c0d82cd538548<commit_after>
|
# Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import *
create_experiment = experiment.create_experiment
|
# Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import Caip
from xmanager.xm_local.executors import Kubernetes
from xmanager.xm_local.executors import Local
from xmanager.xm_local.executors import TensorboardCapability
create_experiment = experiment.create_experiment
Make `DockerOptions` part of the `xm_local` module
PiperOrigin-RevId: 376139511
Change-Id: Ia0ec1337b9ef2c175dea6b0c45e0a99b285d2b31
GitOrigin-RevId: 799d3ef6a98a6e4922b0b60c190c0d82cd538548# Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import *
create_experiment = experiment.create_experiment
|
<commit_before># Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import Caip
from xmanager.xm_local.executors import Kubernetes
from xmanager.xm_local.executors import Local
from xmanager.xm_local.executors import TensorboardCapability
create_experiment = experiment.create_experiment
<commit_msg>Make `DockerOptions` part of the `xm_local` module
PiperOrigin-RevId: 376139511
Change-Id: Ia0ec1337b9ef2c175dea6b0c45e0a99b285d2b31
GitOrigin-RevId: 799d3ef6a98a6e4922b0b60c190c0d82cd538548<commit_after># Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implementation of the XManager Launch API within the local scheduler."""
from xmanager.xm_local import experiment
from xmanager.xm_local.executors import *
create_experiment = experiment.create_experiment
|
55bc355fc97eb5e034e86e7c55919d8cca0edb2b
|
feincms/context_processors.py
|
feincms/context_processors.py
|
from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
def appcontent_parameters(request):
# Remove in FeinCMS 1.4.
return {}
|
from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
|
Remove deprecated appcontent_parameters context processor
|
Remove deprecated appcontent_parameters context processor
It did nothing for some time anyway.
|
Python
|
bsd-3-clause
|
matthiask/feincms2-content,mjl/feincms,feincms/feincms,joshuajonah/feincms,matthiask/feincms2-content,matthiask/feincms2-content,joshuajonah/feincms,matthiask/django-content-editor,michaelkuty/feincms,pjdelport/feincms,nickburlett/feincms,michaelkuty/feincms,michaelkuty/feincms,feincms/feincms,feincms/feincms,matthiask/django-content-editor,mjl/feincms,nickburlett/feincms,pjdelport/feincms,nickburlett/feincms,nickburlett/feincms,joshuajonah/feincms,michaelkuty/feincms,matthiask/django-content-editor,mjl/feincms,joshuajonah/feincms,pjdelport/feincms,matthiask/django-content-editor
|
from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
def appcontent_parameters(request):
# Remove in FeinCMS 1.4.
return {}
Remove deprecated appcontent_parameters context processor
It did nothing for some time anyway.
|
from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
|
<commit_before>from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
def appcontent_parameters(request):
# Remove in FeinCMS 1.4.
return {}
<commit_msg>Remove deprecated appcontent_parameters context processor
It did nothing for some time anyway.<commit_after>
|
from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
|
from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
def appcontent_parameters(request):
# Remove in FeinCMS 1.4.
return {}
Remove deprecated appcontent_parameters context processor
It did nothing for some time anyway.from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
|
<commit_before>from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
def appcontent_parameters(request):
# Remove in FeinCMS 1.4.
return {}
<commit_msg>Remove deprecated appcontent_parameters context processor
It did nothing for some time anyway.<commit_after>from feincms.module.page.models import Page
def add_page_if_missing(request):
"""
If this attribute exists, then a page object has been registered already
by some other part of the code. We let it decide which page object it
wants to pass into the template
"""
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.from_request(request, best_match=True),
}
except Page.DoesNotExist:
return {}
|
e96c1e6fc5cc64ccc9e8ba8c91285ce4feb90a7c
|
workflow-RNA-Seq_Salmon.py
|
workflow-RNA-Seq_Salmon.py
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import salmon
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
align_job = Job.wrapJobFn(salmon.salmon_paired, config, sample, samples,
cores=int(config['salmon']['num_cores']),
memory="{}G".format(config['salmon']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import salmon
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
align_job = Job.wrapJobFn(salmon.salmonEM_paired, config, sample, samples,
cores=int(config['salmon']['num_cores']),
memory="{}G".format(config['salmon']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
Update script for explicit call to EM algorithm optimization
|
Update script for explicit call to EM algorithm optimization
|
Python
|
mit
|
dgaston/ddb-ngsflow-scripts,GastonLab/ddb-scripts,dgaston/ddb-scripts
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import salmon
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
align_job = Job.wrapJobFn(salmon.salmon_paired, config, sample, samples,
cores=int(config['salmon']['num_cores']),
memory="{}G".format(config['salmon']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
Update script for explicit call to EM algorithm optimization
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import salmon
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
align_job = Job.wrapJobFn(salmon.salmonEM_paired, config, sample, samples,
cores=int(config['salmon']['num_cores']),
memory="{}G".format(config['salmon']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
<commit_before>#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import salmon
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
align_job = Job.wrapJobFn(salmon.salmon_paired, config, sample, samples,
cores=int(config['salmon']['num_cores']),
memory="{}G".format(config['salmon']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
<commit_msg>Update script for explicit call to EM algorithm optimization<commit_after>
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import salmon
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
align_job = Job.wrapJobFn(salmon.salmonEM_paired, config, sample, samples,
cores=int(config['salmon']['num_cores']),
memory="{}G".format(config['salmon']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import salmon
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
align_job = Job.wrapJobFn(salmon.salmon_paired, config, sample, samples,
cores=int(config['salmon']['num_cores']),
memory="{}G".format(config['salmon']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
Update script for explicit call to EM algorithm optimization#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import salmon
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
align_job = Job.wrapJobFn(salmon.salmonEM_paired, config, sample, samples,
cores=int(config['salmon']['num_cores']),
memory="{}G".format(config['salmon']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
<commit_before>#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import salmon
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
align_job = Job.wrapJobFn(salmon.salmon_paired, config, sample, samples,
cores=int(config['salmon']['num_cores']),
memory="{}G".format(config['salmon']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
<commit_msg>Update script for explicit call to EM algorithm optimization<commit_after>#!/usr/bin/env python
# Standard packages
import sys
import argparse
# Third-party packages
from toil.job import Job
# Package methods
from ddb import configuration
from ddb_ngsflow import pipeline
from ddb_ngsflow.rna import salmon
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
Job.Runner.addToilOptions(parser)
args = parser.parse_args()
args.logLevel = "INFO"
sys.stdout.write("Parsing configuration data\n")
config = configuration.configure_runtime(args.configuration)
sys.stdout.write("Parsing sample data\n")
samples = configuration.configure_samples(args.samples_file, config)
# Workflow Graph definition. The following workflow definition should create a valid Directed Acyclic Graph (DAG)
root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)
# Per sample jobs
for sample in samples:
# Alignment and Refinement Stages
align_job = Job.wrapJobFn(salmon.salmonEM_paired, config, sample, samples,
cores=int(config['salmon']['num_cores']),
memory="{}G".format(config['salmon']['max_mem']))
# Create workflow from created jobs
root_job.addChild(align_job)
# Start workflow execution
Job.Runner.startToil(root_job, args)
|
3170407aaaeffbc76e31e5fc78d4dacd008e27d2
|
backbone_calendar/ajax/mixins.py
|
backbone_calendar/ajax/mixins.py
|
from django import http
from django.utils import simplejson as json
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
|
import json
from django import http
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
|
Use json and not simplejson
|
Use json and not simplejson
|
Python
|
agpl-3.0
|
rezometz/django-backbone-calendar,rezometz/django-backbone-calendar,rezometz/django-backbone-calendar
|
from django import http
from django.utils import simplejson as json
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
Use json and not simplejson
|
import json
from django import http
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
|
<commit_before>from django import http
from django.utils import simplejson as json
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
<commit_msg>Use json and not simplejson<commit_after>
|
import json
from django import http
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
|
from django import http
from django.utils import simplejson as json
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
Use json and not simplejsonimport json
from django import http
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
|
<commit_before>from django import http
from django.utils import simplejson as json
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
<commit_msg>Use json and not simplejson<commit_after>import json
from django import http
class JSONResponseMixin(object):
context_variable = 'object_list'
def render_to_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def dispatch(self, *args, **kwargs):
return super(JSONResponseMixin, self).dispatch(*args, **kwargs)
def post(self, *args, **kwargs):
return self.get(self, *args, **kwargs)
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
# Note: This is *EXTREMELY* naive; in reality, you'll need
# to do much more complex handling to ensure that arbitrary
# objects -- such as Django model instances or querysets
# -- can be serialized as JSON.
if self.context_variable is not None:
return json.dumps(context.get(self.context_variable, None))
return json.dumps(context)
|
52fddb061bf5f282da75df4462dd735d9fdc041a
|
sgfs/actions/create_structure.py
|
sgfs/actions/create_structure.py
|
from sgfs import SGFS
from sgactions.utils import notify
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title='Preview Folders' if dry_run else 'Creating Folders',
message='\n'.join(commands) or 'Everything is up to date.',
)
|
from sgfs import SGFS
from sgactions.utils import notify, progress
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
title='Preview Folders' if dry_run else 'Creating Folders'
progress(title=title, message='Running; please wait...')
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title=title,
message='\n'.join(commands) or 'Everything is up to date.',
)
|
Use new sgactions progress dialog
|
Use new sgactions progress dialog
|
Python
|
bsd-3-clause
|
westernx/sgfs,westernx/sgfs
|
from sgfs import SGFS
from sgactions.utils import notify
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title='Preview Folders' if dry_run else 'Creating Folders',
message='\n'.join(commands) or 'Everything is up to date.',
)
Use new sgactions progress dialog
|
from sgfs import SGFS
from sgactions.utils import notify, progress
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
title='Preview Folders' if dry_run else 'Creating Folders'
progress(title=title, message='Running; please wait...')
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title=title,
message='\n'.join(commands) or 'Everything is up to date.',
)
|
<commit_before>from sgfs import SGFS
from sgactions.utils import notify
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title='Preview Folders' if dry_run else 'Creating Folders',
message='\n'.join(commands) or 'Everything is up to date.',
)
<commit_msg>Use new sgactions progress dialog<commit_after>
|
from sgfs import SGFS
from sgactions.utils import notify, progress
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
title='Preview Folders' if dry_run else 'Creating Folders'
progress(title=title, message='Running; please wait...')
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title=title,
message='\n'.join(commands) or 'Everything is up to date.',
)
|
from sgfs import SGFS
from sgactions.utils import notify
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title='Preview Folders' if dry_run else 'Creating Folders',
message='\n'.join(commands) or 'Everything is up to date.',
)
Use new sgactions progress dialogfrom sgfs import SGFS
from sgactions.utils import notify, progress
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
title='Preview Folders' if dry_run else 'Creating Folders'
progress(title=title, message='Running; please wait...')
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title=title,
message='\n'.join(commands) or 'Everything is up to date.',
)
|
<commit_before>from sgfs import SGFS
from sgactions.utils import notify
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title='Preview Folders' if dry_run else 'Creating Folders',
message='\n'.join(commands) or 'Everything is up to date.',
)
<commit_msg>Use new sgactions progress dialog<commit_after>from sgfs import SGFS
from sgactions.utils import notify, progress
def run_create(**kwargs):
_run(False, **kwargs)
def run_preview(**kwargs):
_run(True, **kwargs)
def _run(dry_run, entity_type, selected_ids, **kwargs):
title='Preview Folders' if dry_run else 'Creating Folders'
progress(title=title, message='Running; please wait...')
sgfs = SGFS()
entities = sgfs.session.merge([dict(type=entity_type, id=id_) for id_ in selected_ids])
heirarchy = sgfs.session.fetch_heirarchy(entities)
sgfs.session.fetch_core(heirarchy)
commands = sgfs.create_structure(entities, dry_run=dry_run)
notify(
title=title,
message='\n'.join(commands) or 'Everything is up to date.',
)
|
3b15911c669d072bee1a171696636162d23bd07e
|
spec/openpassword/config_spec.py
|
spec/openpassword/config_spec.py
|
from nose.tools import assert_equals
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
assert_equals(cfg.get_path(), "path/to/keychain")
|
from nose.tools import *
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
eq_(cfg.get_path(), "path/to/keychain")
|
Update config test to use eq_ matcher
|
Update config test to use eq_ matcher
|
Python
|
mit
|
openpassword/blimey,openpassword/blimey
|
from nose.tools import assert_equals
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
assert_equals(cfg.get_path(), "path/to/keychain")
Update config test to use eq_ matcher
|
from nose.tools import *
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
eq_(cfg.get_path(), "path/to/keychain")
|
<commit_before>from nose.tools import assert_equals
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
assert_equals(cfg.get_path(), "path/to/keychain")
<commit_msg>Update config test to use eq_ matcher<commit_after>
|
from nose.tools import *
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
eq_(cfg.get_path(), "path/to/keychain")
|
from nose.tools import assert_equals
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
assert_equals(cfg.get_path(), "path/to/keychain")
Update config test to use eq_ matcherfrom nose.tools import *
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
eq_(cfg.get_path(), "path/to/keychain")
|
<commit_before>from nose.tools import assert_equals
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
assert_equals(cfg.get_path(), "path/to/keychain")
<commit_msg>Update config test to use eq_ matcher<commit_after>from nose.tools import *
from openpassword.config import Config
class ConfigSpec:
def it_sets_the_path_to_the_keychain(self):
cfg = Config()
cfg.set_path("path/to/keychain")
eq_(cfg.get_path(), "path/to/keychain")
|
c073131ac4b951affdac454824bb3eed913cd931
|
huxley/api/tests/committee.py
|
huxley/api/tests/committee.py
|
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
|
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
|
Add copyright header to CommitteeDetailGetTestCase.
|
Add copyright header to CommitteeDetailGetTestCase.
|
Python
|
bsd-3-clause
|
bmun/huxley,ctmunwebmaster/huxley,nathanielparke/huxley,ctmunwebmaster/huxley,bmun/huxley,bmun/huxley,ctmunwebmaster/huxley,nathanielparke/huxley,bmun/huxley,nathanielparke/huxley,nathanielparke/huxley,ctmunwebmaster/huxley
|
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
Add copyright header to CommitteeDetailGetTestCase.
|
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
|
<commit_before>import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
<commit_msg>Add copyright header to CommitteeDetailGetTestCase.<commit_after>
|
# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
|
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
Add copyright header to CommitteeDetailGetTestCase.# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
|
<commit_before>import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
<commit_msg>Add copyright header to CommitteeDetailGetTestCase.<commit_after># Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from django.core.urlresolvers import reverse
from django.test import TestCase
from django.test.client import Client
from huxley.utils.test import TestCommittees
class CommitteeDetailGetTestCase(TestCase):
def setUp(self):
self.client = Client()
def get_url(self, committee_id):
return reverse('api:committee_detail', args=(committee_id,))
def get_response(self, url):
return json.loads(self.client.get(url).content)
def test_anonymous_user(self):
'''It should return the correct fields for a committee.'''
c = TestCommittees.new_committee()
url = self.get_url(c.id)
data = self.get_response(url)
self.assertEqual(data['delegation_size'], c.delegation_size)
self.assertEqual(data['special'], c.special)
self.assertEqual(data['id'], c.id)
self.assertEqual(data['full_name'], c.full_name)
self.assertEqual(data['name'], c.name)
|
2a68505e36358900e045f74a8b2885486f6a302e
|
framework/guid/model.py
|
framework/guid/model.py
|
from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField(backref='guid')
_meta = {
'optimistic': True
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def __init__(self, *args, **kwargs):
"""Overridden constructor. When a GuidStoredObject is instantiated,
create a new Guid if the object doesn't already have one, then attach
the Guid to the StoredObject.
Note: This requires saving the StoredObject once and the Guid twice to
ensure correct back-references; this could be made more efficient if
modular-odm could handle back-references of objects that have not been
saved.
"""
# Call superclass constructor
super(GuidStoredObject, self).__init__(*args, **kwargs)
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
self.save()
# Add self to GUID
guid.referent = self
guid.save()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
|
from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField()
_meta = {
'optimistic': True,
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def _ensure_guid(self):
"""Create GUID record if current record doesn't already have one, then
point GUID to self.
"""
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
guid.referent = (guid._primary_key, self._name)
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
def __init__(self, *args, **kwargs):
""" Ensure GUID after initialization. """
super(GuidStoredObject, self).__init__(*args, **kwargs)
self._ensure_guid()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
|
Remove backref on GUID; factor out _ensure_guid
|
Remove backref on GUID; factor out _ensure_guid
|
Python
|
apache-2.0
|
arpitar/osf.io,rdhyee/osf.io,lyndsysimon/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,abought/osf.io,CenterForOpenScience/osf.io,asanfilippo7/osf.io,emetsger/osf.io,kwierman/osf.io,barbour-em/osf.io,icereval/osf.io,Johnetordoff/osf.io,jolene-esposito/osf.io,RomanZWang/osf.io,haoyuchen1992/osf.io,barbour-em/osf.io,binoculars/osf.io,acshi/osf.io,sbt9uc/osf.io,kushG/osf.io,mluo613/osf.io,KAsante95/osf.io,RomanZWang/osf.io,amyshi188/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,icereval/osf.io,dplorimer/osf,brandonPurvis/osf.io,samchrisinger/osf.io,hmoco/osf.io,ckc6cz/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,ticklemepierce/osf.io,DanielSBrown/osf.io,mluke93/osf.io,haoyuchen1992/osf.io,samanehsan/osf.io,TomBaxter/osf.io,arpitar/osf.io,GaryKriebel/osf.io,jmcarp/osf.io,alexschiller/osf.io,revanthkolli/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,mattclark/osf.io,hmoco/osf.io,GaryKriebel/osf.io,zkraime/osf.io,sbt9uc/osf.io,kch8qx/osf.io,mluo613/osf.io,himanshuo/osf.io,monikagrabowska/osf.io,reinaH/osf.io,TomHeatwole/osf.io,samchrisinger/osf.io,emetsger/osf.io,arpitar/osf.io,asanfilippo7/osf.io,lamdnhan/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,reinaH/osf.io,danielneis/osf.io,billyhunt/osf.io,jmcarp/osf.io,ckc6cz/osf.io,erinspace/osf.io,ticklemepierce/osf.io,GaryKriebel/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,brianjgeiger/osf.io,GageGaskins/osf.io,mattclark/osf.io,caseyrygt/osf.io,himanshuo/osf.io,emetsger/osf.io,kch8qx/osf.io,cosenal/osf.io,jinluyuan/osf.io,felliott/osf.io,HarryRybacki/osf.io,bdyetton/prettychart,emetsger/osf.io,barbour-em/osf.io,jolene-esposito/osf.io,revanthkolli/osf.io,MerlinZhang/osf.io,zkraime/osf.io,crcresearch/osf.io,doublebits/osf.io,chennan47/osf.io,GageGaskins/osf.io,pattisdr/osf.io,SSJohns/osf.io,njantrania/osf.io,dplorimer/osf,fabianvf/osf.io,caneruguz/osf.io,rdhyee/osf.io,hmoco/osf.io,mfraezz/osf.io,zkraime/osf.io,Ghalko/osf.io,kushG/osf.io,fabianvf/osf.io,samchrisinger/osf.io,AndrewSall
ans/osf.io,jinluyuan/osf.io,chennan47/osf.io,amyshi188/osf.io,jinluyuan/osf.io,leb2dg/osf.io,chrisseto/osf.io,danielneis/osf.io,acshi/osf.io,caseyrygt/osf.io,brianjgeiger/osf.io,ZobairAlijan/osf.io,jeffreyliu3230/osf.io,njantrania/osf.io,CenterForOpenScience/osf.io,lamdnhan/osf.io,bdyetton/prettychart,dplorimer/osf,mattclark/osf.io,caseyrollins/osf.io,mluke93/osf.io,DanielSBrown/osf.io,GageGaskins/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,laurenrevere/osf.io,KAsante95/osf.io,doublebits/osf.io,jnayak1/osf.io,doublebits/osf.io,zachjanicki/osf.io,caneruguz/osf.io,adlius/osf.io,jolene-esposito/osf.io,cldershem/osf.io,felliott/osf.io,saradbowman/osf.io,Ghalko/osf.io,sbt9uc/osf.io,Nesiehr/osf.io,sloria/osf.io,ckc6cz/osf.io,binoculars/osf.io,cslzchen/osf.io,bdyetton/prettychart,cslzchen/osf.io,cosenal/osf.io,brandonPurvis/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,caseyrollins/osf.io,MerlinZhang/osf.io,fabianvf/osf.io,sloria/osf.io,DanielSBrown/osf.io,reinaH/osf.io,kwierman/osf.io,zamattiac/osf.io,zamattiac/osf.io,hmoco/osf.io,ckc6cz/osf.io,zamattiac/osf.io,danielneis/osf.io,alexschiller/osf.io,zkraime/osf.io,GaryKriebel/osf.io,erinspace/osf.io,mluo613/osf.io,alexschiller/osf.io,fabianvf/osf.io,aaxelb/osf.io,Nesiehr/osf.io,doublebits/osf.io,baylee-d/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,amyshi188/osf.io,asanfilippo7/osf.io,lyndsysimon/osf.io,billyhunt/osf.io,sbt9uc/osf.io,mluke93/osf.io,lyndsysimon/osf.io,billyhunt/osf.io,jeffreyliu3230/osf.io,felliott/osf.io,acshi/osf.io,jnayak1/osf.io,lamdnhan/osf.io,jolene-esposito/osf.io,erinspace/osf.io,amyshi188/osf.io,crcresearch/osf.io,acshi/osf.io,mluo613/osf.io,samanehsan/osf.io,zachjanicki/osf.io,bdyetton/prettychart,brandonPurvis/osf.io,njantrania/osf.io,haoyuchen1992/osf.io,HalcyonChimera/osf.io,abought/osf.io,MerlinZhang/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,kch8qx/osf.io,lamdnhan/osf.io,leb2dg/osf.io,revanthkolli/osf.io,zachjanicki/osf.io,clde
rshem/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,chrisseto/osf.io,njantrania/osf.io,saradbowman/osf.io,brandonPurvis/osf.io,kch8qx/osf.io,cosenal/osf.io,HarryRybacki/osf.io,jeffreyliu3230/osf.io,revanthkolli/osf.io,jnayak1/osf.io,icereval/osf.io,cwisecarver/osf.io,cslzchen/osf.io,chennan47/osf.io,jnayak1/osf.io,kch8qx/osf.io,wearpants/osf.io,kwierman/osf.io,kushG/osf.io,jinluyuan/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,AndrewSallans/osf.io,danielneis/osf.io,petermalcolm/osf.io,petermalcolm/osf.io,wearpants/osf.io,cwisecarver/osf.io,HarryRybacki/osf.io,Ghalko/osf.io,dplorimer/osf,mfraezz/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,lyndsysimon/osf.io,billyhunt/osf.io,felliott/osf.io,Nesiehr/osf.io,acshi/osf.io,ticklemepierce/osf.io,mfraezz/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,jeffreyliu3230/osf.io,doublebits/osf.io,RomanZWang/osf.io,arpitar/osf.io,billyhunt/osf.io,barbour-em/osf.io,MerlinZhang/osf.io,himanshuo/osf.io,TomBaxter/osf.io,petermalcolm/osf.io,ZobairAlijan/osf.io,ticklemepierce/osf.io,caseyrollins/osf.io,leb2dg/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,GageGaskins/osf.io,KAsante95/osf.io,binoculars/osf.io,zamattiac/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,samanehsan/osf.io,petermalcolm/osf.io,TomBaxter/osf.io,samanehsan/osf.io,KAsante95/osf.io,cosenal/osf.io,adlius/osf.io,baylee-d/osf.io,RomanZWang/osf.io,baylee-d/osf.io,SSJohns/osf.io,zachjanicki/osf.io,chrisseto/osf.io,sloria/osf.io,SSJohns/osf.io,TomHeatwole/osf.io,adlius/osf.io,brianjgeiger/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,chrisseto/osf.io,jmcarp/osf.io,kushG/osf.io,cldershem/osf.io,TomHeatwole/osf.io,mluo613/osf.io,reinaH/osf.io,abought/osf.io,mluke93/osf.io,jmcarp/osf.io,cldershem/osf.io,crcresearch/osf.io,kwierman/osf.io,Ghalko/osf.io,himanshuo/osf.io,wearpants/osf.io,laurenrevere/osf.io,leb2dg/osf.io,HarryRybacki/osf.io,adlius/osf.io,pattisdr/osf.io,abought/osf.io,ZobairAlijan/osf.io
|
from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField(backref='guid')
_meta = {
'optimistic': True
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def __init__(self, *args, **kwargs):
"""Overridden constructor. When a GuidStoredObject is instantiated,
create a new Guid if the object doesn't already have one, then attach
the Guid to the StoredObject.
Note: This requires saving the StoredObject once and the Guid twice to
ensure correct back-references; this could be made more efficient if
modular-odm could handle back-references of objects that have not been
saved.
"""
# Call superclass constructor
super(GuidStoredObject, self).__init__(*args, **kwargs)
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
self.save()
# Add self to GUID
guid.referent = self
guid.save()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
Remove backref on GUID; factor out _ensure_guid
|
from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField()
_meta = {
'optimistic': True,
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def _ensure_guid(self):
"""Create GUID record if current record doesn't already have one, then
point GUID to self.
"""
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
guid.referent = (guid._primary_key, self._name)
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
def __init__(self, *args, **kwargs):
""" Ensure GUID after initialization. """
super(GuidStoredObject, self).__init__(*args, **kwargs)
self._ensure_guid()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
|
<commit_before>from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField(backref='guid')
_meta = {
'optimistic': True
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def __init__(self, *args, **kwargs):
"""Overridden constructor. When a GuidStoredObject is instantiated,
create a new Guid if the object doesn't already have one, then attach
the Guid to the StoredObject.
Note: This requires saving the StoredObject once and the Guid twice to
ensure correct back-references; this could be made more efficient if
modular-odm could handle back-references of objects that have not been
saved.
"""
# Call superclass constructor
super(GuidStoredObject, self).__init__(*args, **kwargs)
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
self.save()
# Add self to GUID
guid.referent = self
guid.save()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
<commit_msg>Remove backref on GUID; factor out _ensure_guid<commit_after>
|
from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField()
_meta = {
'optimistic': True,
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def _ensure_guid(self):
"""Create GUID record if current record doesn't already have one, then
point GUID to self.
"""
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
guid.referent = (guid._primary_key, self._name)
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
def __init__(self, *args, **kwargs):
""" Ensure GUID after initialization. """
super(GuidStoredObject, self).__init__(*args, **kwargs)
self._ensure_guid()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
|
from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField(backref='guid')
_meta = {
'optimistic': True
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def __init__(self, *args, **kwargs):
"""Overridden constructor. When a GuidStoredObject is instantiated,
create a new Guid if the object doesn't already have one, then attach
the Guid to the StoredObject.
Note: This requires saving the StoredObject once and the Guid twice to
ensure correct back-references; this could be made more efficient if
modular-odm could handle back-references of objects that have not been
saved.
"""
# Call superclass constructor
super(GuidStoredObject, self).__init__(*args, **kwargs)
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
self.save()
# Add self to GUID
guid.referent = self
guid.save()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
Remove backref on GUID; factor out _ensure_guidfrom framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField()
_meta = {
'optimistic': True,
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def _ensure_guid(self):
"""Create GUID record if current record doesn't already have one, then
point GUID to self.
"""
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
guid.referent = (guid._primary_key, self._name)
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
def __init__(self, *args, **kwargs):
""" Ensure GUID after initialization. """
super(GuidStoredObject, self).__init__(*args, **kwargs)
self._ensure_guid()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
|
<commit_before>from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField(backref='guid')
_meta = {
'optimistic': True
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def __init__(self, *args, **kwargs):
"""Overridden constructor. When a GuidStoredObject is instantiated,
create a new Guid if the object doesn't already have one, then attach
the Guid to the StoredObject.
Note: This requires saving the StoredObject once and the Guid twice to
ensure correct back-references; this could be made more efficient if
modular-odm could handle back-references of objects that have not been
saved.
"""
# Call superclass constructor
super(GuidStoredObject, self).__init__(*args, **kwargs)
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
self.save()
# Add self to GUID
guid.referent = self
guid.save()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
<commit_msg>Remove backref on GUID; factor out _ensure_guid<commit_after>from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField()
_meta = {
'optimistic': True,
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def _ensure_guid(self):
"""Create GUID record if current record doesn't already have one, then
point GUID to self.
"""
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
guid.referent = (guid._primary_key, self._name)
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
def __init__(self, *args, **kwargs):
""" Ensure GUID after initialization. """
super(GuidStoredObject, self).__init__(*args, **kwargs)
self._ensure_guid()
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
|
21b022362a09c4e408b9375a38505975e8c7f965
|
comet/utility/test/test_whitelist.py
|
comet/utility/test/test_whitelist.py
|
from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0))
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0))
)
|
from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
None
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0)),
None
)
|
Remove assertIsNone for Python 2.6 compatibility
|
Remove assertIsNone for Python 2.6 compatibility
|
Python
|
bsd-2-clause
|
jdswinbank/Comet,jdswinbank/Comet
|
from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0))
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0))
)
Remove assertIsNone for Python 2.6 compatibility
|
from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
None
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0)),
None
)
|
<commit_before>from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0))
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0))
)
<commit_msg>Remove assertIsNone for Python 2.6 compatibility<commit_after>
|
from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
None
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0)),
None
)
|
from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0))
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0))
)
Remove assertIsNone for Python 2.6 compatibilityfrom ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
None
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0)),
None
)
|
<commit_before>from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0))
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertIsNone(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0))
)
<commit_msg>Remove assertIsNone for Python 2.6 compatibility<commit_after>from ipaddr import IPNetwork
from twisted.internet.protocol import Protocol
from twisted.internet.address import IPv4Address
from twisted.trial import unittest
from ...test.support import DummyEvent
from ..whitelist import WhitelistingFactory
WhitelistingFactory.protocol = Protocol
class WhitelistingFactoryTestCase(unittest.TestCase):
def test_empty_whitelist(self):
# All connections should be denied
factory = WhitelistingFactory([])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
None
)
def test_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('0.0.0.0/0')])
self.assertIsInstance(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.1', 0)),
Protocol
)
def test_not_in_whitelist(self):
factory = WhitelistingFactory([IPNetwork('127.0.0.1/32')])
self.assertEqual(
factory.buildProtocol(IPv4Address('TCP', '127.0.0.2', 0)),
None
)
|
40616138673205b3b4f3150a659ab02830b2bbc0
|
tests/test_player_creation.py
|
tests/test_player_creation.py
|
from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
app.post('/players', params)
res = app.get('/players')
assert res.status_int == 200
assert res.content_type == 'application/json'
assert res.json == expected
|
from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
post_response = app.post('/players', params)
assert post_response.status_int == 201
get_response = app.get('/players')
assert get_response.status_int == 200
assert get_response.content_type == 'application/json'
assert get_response.json == expected
|
Update player creation test to verify POST status code.
|
Update player creation test to verify POST status code.
|
Python
|
mit
|
dropshot/dropshot-server
|
from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
app.post('/players', params)
res = app.get('/players')
assert res.status_int == 200
assert res.content_type == 'application/json'
assert res.json == expected
Update player creation test to verify POST status code.
|
from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
post_response = app.post('/players', params)
assert post_response.status_int == 201
get_response = app.get('/players')
assert get_response.status_int == 200
assert get_response.content_type == 'application/json'
assert get_response.json == expected
|
<commit_before>from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
app.post('/players', params)
res = app.get('/players')
assert res.status_int == 200
assert res.content_type == 'application/json'
assert res.json == expected
<commit_msg>Update player creation test to verify POST status code.<commit_after>
|
from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
post_response = app.post('/players', params)
assert post_response.status_int == 201
get_response = app.get('/players')
assert get_response.status_int == 200
assert get_response.content_type == 'application/json'
assert get_response.json == expected
|
from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
app.post('/players', params)
res = app.get('/players')
assert res.status_int == 200
assert res.content_type == 'application/json'
assert res.json == expected
Update player creation test to verify POST status code.from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
post_response = app.post('/players', params)
assert post_response.status_int == 201
get_response = app.get('/players')
assert get_response.status_int == 200
assert get_response.content_type == 'application/json'
assert get_response.json == expected
|
<commit_before>from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
app.post('/players', params)
res = app.get('/players')
assert res.status_int == 200
assert res.content_type == 'application/json'
assert res.json == expected
<commit_msg>Update player creation test to verify POST status code.<commit_after>from webtest import TestApp
import dropshot
def test_create_player():
app = TestApp(dropshot.app)
params = {'username': 'chapmang',
'password': 'deadparrot',
'email': 'chapmang@dropshot.com'}
expected = {'count': 1,
'offset': 0,
'players': [
{'gamesPlayed': 0,
'username': 'chapmang'}
]}
post_response = app.post('/players', params)
assert post_response.status_int == 201
get_response = app.get('/players')
assert get_response.status_int == 200
assert get_response.content_type == 'application/json'
assert get_response.json == expected
|
4921d58775faa65423fac321ef68f065b2499813
|
experiments/hydrotrend-uq-1/plot_results.py
|
experiments/hydrotrend-uq-1/plot_results.py
|
#!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
tab_file = 'dakota.dat'
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-dace-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_stdev.png')
|
#!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
from dakota_utils.convert import has_interface_column, strip_interface_column
tab_file = 'dakota.dat'
if has_interface_column(tab_file):
strip_interface_column(tab_file)
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-uq-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_stdev.png')
|
Update script for Dakota 6.1 tabular output file
|
Update script for Dakota 6.1 tabular output file
|
Python
|
mit
|
mcflugen/dakota-experiments,mdpiper/dakota-experiments,mdpiper/dakota-experiments,mdpiper/dakota-experiments,mcflugen/dakota-experiments
|
#!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
tab_file = 'dakota.dat'
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-dace-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_stdev.png')
Update script for Dakota 6.1 tabular output file
|
#!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
from dakota_utils.convert import has_interface_column, strip_interface_column
tab_file = 'dakota.dat'
if has_interface_column(tab_file):
strip_interface_column(tab_file)
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-uq-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_stdev.png')
|
<commit_before>#!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
tab_file = 'dakota.dat'
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-dace-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_stdev.png')
<commit_msg>Update script for Dakota 6.1 tabular output file<commit_after>
|
#!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
from dakota_utils.convert import has_interface_column, strip_interface_column
tab_file = 'dakota.dat'
if has_interface_column(tab_file):
strip_interface_column(tab_file)
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-uq-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_stdev.png')
|
#!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
tab_file = 'dakota.dat'
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-dace-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_stdev.png')
Update script for Dakota 6.1 tabular output file#!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
from dakota_utils.convert import has_interface_column, strip_interface_column
tab_file = 'dakota.dat'
if has_interface_column(tab_file):
strip_interface_column(tab_file)
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-uq-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_stdev.png')
|
<commit_before>#!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
tab_file = 'dakota.dat'
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-dace-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Qs(T,P)', \
outfile='dakota-hydrotrend-dace-1-Qs_stdev.png')
<commit_msg>Update script for Dakota 6.1 tabular output file<commit_after>#!/usr/bin/env python
# Makes a standard set of plots from Dakota output.
# Mark Piper (mark.piper@colorado.edu)
# Note that these imports are from the installed version of dakota_utils.
from dakota_utils.read import read_tabular
from dakota_utils.plot import plot_samples, plot_irregular_surface
from dakota_utils.convert import has_interface_column, strip_interface_column
tab_file = 'dakota.dat'
if has_interface_column(tab_file):
strip_interface_column(tab_file)
tab_data = read_tabular(tab_file)
plot_samples(tab_data, \
outfile='dakota-hydrotrend-uq-1-lhs-samples.png')
plot_irregular_surface(tab_data, response_index=-2, \
title='HydroTrend: Mean Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_mean.png')
plot_irregular_surface(tab_data, response_index=-1, \
title='HydroTrend: Stdev Q(T,P)', \
outfile='dakota-hydrotrend-uq-1-Q_stdev.png')
|
a263926614a2f9c0c5c41d19282db79ac5e79e7e
|
gittip/orm/__init__.py
|
gittip/orm/__init__.py
|
from __future__ import unicode_literals
import os
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
db = SQLAlchemy()
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
Base = declarative_base(cls=Model)
Base.metadata.bind = db.engine
Base.query = db.session.query_property()
metadata = MetaData()
metadata.bind = db.engine
all = [Base, db, metadata]
def rollback(*_):
db.session.rollback()
|
from __future__ import unicode_literals
import os
import pdb
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
self.Model = self.make_declarative_base()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
def make_declarative_base(self):
base = declarative_base(cls=Model)
base.query = self.session.query_property()
return base
db = SQLAlchemy()
all = [db]
def rollback(*_):
db.session.rollback()
|
Remove the convenience functions, reorganize around the SQLAlchemy class
|
Remove the convenience functions, reorganize around the SQLAlchemy class
|
Python
|
mit
|
eXcomm/gratipay.com,gratipay/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,bountysource/www.gittip.com,MikeFair/www.gittip.com,eXcomm/gratipay.com,eXcomm/gratipay.com,eXcomm/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,bountysource/www.gittip.com,bountysource/www.gittip.com,gratipay/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,MikeFair/www.gittip.com,mccolgst/www.gittip.com,MikeFair/www.gittip.com,studio666/gratipay.com,bountysource/www.gittip.com
|
from __future__ import unicode_literals
import os
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
db = SQLAlchemy()
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
Base = declarative_base(cls=Model)
Base.metadata.bind = db.engine
Base.query = db.session.query_property()
metadata = MetaData()
metadata.bind = db.engine
all = [Base, db, metadata]
def rollback(*_):
db.session.rollback()Remove the convenience functions, reorganize around the SQLAlchemy class
|
from __future__ import unicode_literals
import os
import pdb
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
self.Model = self.make_declarative_base()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
def make_declarative_base(self):
base = declarative_base(cls=Model)
base.query = self.session.query_property()
return base
db = SQLAlchemy()
all = [db]
def rollback(*_):
db.session.rollback()
|
<commit_before>from __future__ import unicode_literals
import os
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
db = SQLAlchemy()
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
Base = declarative_base(cls=Model)
Base.metadata.bind = db.engine
Base.query = db.session.query_property()
metadata = MetaData()
metadata.bind = db.engine
all = [Base, db, metadata]
def rollback(*_):
db.session.rollback()<commit_msg>Remove the convenience functions, reorganize around the SQLAlchemy class<commit_after>
|
from __future__ import unicode_literals
import os
import pdb
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
self.Model = self.make_declarative_base()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
def make_declarative_base(self):
base = declarative_base(cls=Model)
base.query = self.session.query_property()
return base
db = SQLAlchemy()
all = [db]
def rollback(*_):
db.session.rollback()
|
from __future__ import unicode_literals
import os
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
db = SQLAlchemy()
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
Base = declarative_base(cls=Model)
Base.metadata.bind = db.engine
Base.query = db.session.query_property()
metadata = MetaData()
metadata.bind = db.engine
all = [Base, db, metadata]
def rollback(*_):
db.session.rollback()Remove the convenience functions, reorganize around the SQLAlchemy classfrom __future__ import unicode_literals
import os
import pdb
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
self.Model = self.make_declarative_base()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
def make_declarative_base(self):
base = declarative_base(cls=Model)
base.query = self.session.query_property()
return base
db = SQLAlchemy()
all = [db]
def rollback(*_):
db.session.rollback()
|
<commit_before>from __future__ import unicode_literals
import os
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
db = SQLAlchemy()
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
def save(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
Base = declarative_base(cls=Model)
Base.metadata.bind = db.engine
Base.query = db.session.query_property()
metadata = MetaData()
metadata.bind = db.engine
all = [Base, db, metadata]
def rollback(*_):
db.session.rollback()<commit_msg>Remove the convenience functions, reorganize around the SQLAlchemy class<commit_after>from __future__ import unicode_literals
import os
import pdb
from sqlalchemy import create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, scoped_session
class Model(object):
def __repr__(self):
cols = self.__mapper__.c.keys()
class_name = self.__class__.__name__
items = ', '.join(['%s=%s' % (col, repr(getattr(self, col))) for col
in cols])
return '%s(%s)' % (class_name, items)
def attrs_dict(self):
keys = self.__mapper__.c.keys()
attrs = {}
for key in keys:
attrs[key] = getattr(self, key)
return attrs
class SQLAlchemy(object):
def __init__(self):
self.session = self.create_session()
self.Model = self.make_declarative_base()
@property
def engine(self):
dburl = os.environ['DATABASE_URL']
return create_engine(dburl)
def create_session(self):
session = scoped_session(sessionmaker())
session.configure(bind=self.engine)
return session
def make_declarative_base(self):
base = declarative_base(cls=Model)
base.query = self.session.query_property()
return base
db = SQLAlchemy()
all = [db]
def rollback(*_):
db.session.rollback()
|
766735563fe327363e87a103fd70f88a896bf9a8
|
version.py
|
version.py
|
major = 0
minor=0
patch=22
branch="master"
timestamp=1376526477.22
|
major = 0
minor=0
patch=23
branch="master"
timestamp=1376526646.52
|
Tag commit for v0.0.23-master generated by gitmake.py
|
Tag commit for v0.0.23-master generated by gitmake.py
|
Python
|
mit
|
ryansturmer/gitmake
|
major = 0
minor=0
patch=22
branch="master"
timestamp=1376526477.22Tag commit for v0.0.23-master generated by gitmake.py
|
major = 0
minor=0
patch=23
branch="master"
timestamp=1376526646.52
|
<commit_before>major = 0
minor=0
patch=22
branch="master"
timestamp=1376526477.22<commit_msg>Tag commit for v0.0.23-master generated by gitmake.py<commit_after>
|
major = 0
minor=0
patch=23
branch="master"
timestamp=1376526646.52
|
major = 0
minor=0
patch=22
branch="master"
timestamp=1376526477.22Tag commit for v0.0.23-master generated by gitmake.pymajor = 0
minor=0
patch=23
branch="master"
timestamp=1376526646.52
|
<commit_before>major = 0
minor=0
patch=22
branch="master"
timestamp=1376526477.22<commit_msg>Tag commit for v0.0.23-master generated by gitmake.py<commit_after>major = 0
minor=0
patch=23
branch="master"
timestamp=1376526646.52
|
0e4b717389f032bef856d646afe0912a76519738
|
setup.py
|
setup.py
|
#! /usr/bin/env python
"""Setup information of demandlib.
"""
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='demandlib',
version='0.1.7',
author='oemof developer group',
url='https://oemof.org/',
license='MIT',
author_email='contact@oemof.org',
description='Demandlib of the open energy modelling framework',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=['numpy >= 1.17.0',
'pandas >= 1.0'],
package_data={
'demandlib': [os.path.join('bdew_data', '*.csv')],
'demandlib.examples': ['*.csv']},
)
|
#! /usr/bin/env python
"""Setup information of demandlib.
"""
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='demandlib',
version='0.1.7',
author='oemof developer group',
url='https://oemof.org/',
license='MIT',
author_email='contact@oemof.org',
description='Demandlib of the open energy modelling framework',
long_description=read('README.rst'),
long_description_content_type='text/x-rst',
packages=find_packages(),
install_requires=['numpy >= 1.17.0',
'pandas >= 1.0'],
package_data={
'demandlib': [os.path.join('bdew_data', '*.csv')],
'demandlib.examples': ['*.csv']},
)
|
Set content type of package desription
|
Set content type of package desription
|
Python
|
mit
|
oemof/demandlib
|
#! /usr/bin/env python
"""Setup information of demandlib.
"""
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='demandlib',
version='0.1.7',
author='oemof developer group',
url='https://oemof.org/',
license='MIT',
author_email='contact@oemof.org',
description='Demandlib of the open energy modelling framework',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=['numpy >= 1.17.0',
'pandas >= 1.0'],
package_data={
'demandlib': [os.path.join('bdew_data', '*.csv')],
'demandlib.examples': ['*.csv']},
)
Set content type of package desription
|
#! /usr/bin/env python
"""Setup information of demandlib.
"""
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='demandlib',
version='0.1.7',
author='oemof developer group',
url='https://oemof.org/',
license='MIT',
author_email='contact@oemof.org',
description='Demandlib of the open energy modelling framework',
long_description=read('README.rst'),
long_description_content_type='text/x-rst',
packages=find_packages(),
install_requires=['numpy >= 1.17.0',
'pandas >= 1.0'],
package_data={
'demandlib': [os.path.join('bdew_data', '*.csv')],
'demandlib.examples': ['*.csv']},
)
|
<commit_before>#! /usr/bin/env python
"""Setup information of demandlib.
"""
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='demandlib',
version='0.1.7',
author='oemof developer group',
url='https://oemof.org/',
license='MIT',
author_email='contact@oemof.org',
description='Demandlib of the open energy modelling framework',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=['numpy >= 1.17.0',
'pandas >= 1.0'],
package_data={
'demandlib': [os.path.join('bdew_data', '*.csv')],
'demandlib.examples': ['*.csv']},
)
<commit_msg>Set content type of package desription<commit_after>
|
#! /usr/bin/env python
"""Setup information of demandlib.
"""
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='demandlib',
version='0.1.7',
author='oemof developer group',
url='https://oemof.org/',
license='MIT',
author_email='contact@oemof.org',
description='Demandlib of the open energy modelling framework',
long_description=read('README.rst'),
long_description_content_type='text/x-rst',
packages=find_packages(),
install_requires=['numpy >= 1.17.0',
'pandas >= 1.0'],
package_data={
'demandlib': [os.path.join('bdew_data', '*.csv')],
'demandlib.examples': ['*.csv']},
)
|
#! /usr/bin/env python
"""Setup information of demandlib.
"""
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='demandlib',
version='0.1.7',
author='oemof developer group',
url='https://oemof.org/',
license='MIT',
author_email='contact@oemof.org',
description='Demandlib of the open energy modelling framework',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=['numpy >= 1.17.0',
'pandas >= 1.0'],
package_data={
'demandlib': [os.path.join('bdew_data', '*.csv')],
'demandlib.examples': ['*.csv']},
)
Set content type of package desription#! /usr/bin/env python
"""Setup information of demandlib.
"""
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='demandlib',
version='0.1.7',
author='oemof developer group',
url='https://oemof.org/',
license='MIT',
author_email='contact@oemof.org',
description='Demandlib of the open energy modelling framework',
long_description=read('README.rst'),
long_description_content_type='text/x-rst',
packages=find_packages(),
install_requires=['numpy >= 1.17.0',
'pandas >= 1.0'],
package_data={
'demandlib': [os.path.join('bdew_data', '*.csv')],
'demandlib.examples': ['*.csv']},
)
|
<commit_before>#! /usr/bin/env python
"""Setup information of demandlib.
"""
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='demandlib',
version='0.1.7',
author='oemof developer group',
url='https://oemof.org/',
license='MIT',
author_email='contact@oemof.org',
description='Demandlib of the open energy modelling framework',
long_description=read('README.rst'),
packages=find_packages(),
install_requires=['numpy >= 1.17.0',
'pandas >= 1.0'],
package_data={
'demandlib': [os.path.join('bdew_data', '*.csv')],
'demandlib.examples': ['*.csv']},
)
<commit_msg>Set content type of package desription<commit_after>#! /usr/bin/env python
"""Setup information of demandlib.
"""
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='demandlib',
version='0.1.7',
author='oemof developer group',
url='https://oemof.org/',
license='MIT',
author_email='contact@oemof.org',
description='Demandlib of the open energy modelling framework',
long_description=read('README.rst'),
long_description_content_type='text/x-rst',
packages=find_packages(),
install_requires=['numpy >= 1.17.0',
'pandas >= 1.0'],
package_data={
'demandlib': [os.path.join('bdew_data', '*.csv')],
'demandlib.examples': ['*.csv']},
)
|
584e78390e61833cede3865ad205a51b561818e2
|
setup.py
|
setup.py
|
"""\
Grip
----
Render local readme files before sending off to Github.
Grip is easy to set up
``````````````````````
::
$ pip install grip
$ cd myproject
$ grip
* Running on http://localhost:5000/
Links
`````
* `Website <http://github.com/joeyespo/grip>`_
"""
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
sys.exit(os.system('python setup.py sdist upload'))
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='grip',
version='1.2.0',
description='Render local readme files before sending off to Github.',
long_description=__doc__,
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/grip',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'grip': ['templates/*.html']},
install_requires=read('requirements.txt'),
zip_safe=False,
entry_points={'console_scripts': ['grip = grip.command:main']},
)
|
"""\
Grip
----
Render local readme files before sending off to Github.
Grip is easy to set up
``````````````````````
::
$ pip install grip
$ cd myproject
$ grip
* Running on http://localhost:5000/
Links
`````
* `Website <http://github.com/joeyespo/grip>`_
"""
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
sys.exit(os.system('python setup.py sdist upload'))
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='grip',
version='1.2.0',
description='Render local readme files before sending off to Github.',
long_description=__doc__,
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/grip',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'': ['LICENSE'], 'grip': ['static/*', 'templates/*']},
install_requires=read('requirements.txt'),
zip_safe=False,
entry_points={'console_scripts': ['grip = grip.command:main']},
)
|
Add LICENSE and static/ back to package data.
|
Add LICENSE and static/ back to package data.
Adding the static/ just in case there's ever any static
files later on, since it's a standard Flask folder.
Also, adding all files in templates/ so there's
no surprises later on in development.
|
Python
|
mit
|
jbarreras/grip,joeyespo/grip,ssundarraj/grip,joeyespo/grip,mgoddard-pivotal/grip,ssundarraj/grip,mgoddard-pivotal/grip,jbarreras/grip
|
"""\
Grip
----
Render local readme files before sending off to Github.
Grip is easy to set up
``````````````````````
::
$ pip install grip
$ cd myproject
$ grip
* Running on http://localhost:5000/
Links
`````
* `Website <http://github.com/joeyespo/grip>`_
"""
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
sys.exit(os.system('python setup.py sdist upload'))
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='grip',
version='1.2.0',
description='Render local readme files before sending off to Github.',
long_description=__doc__,
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/grip',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'grip': ['templates/*.html']},
install_requires=read('requirements.txt'),
zip_safe=False,
entry_points={'console_scripts': ['grip = grip.command:main']},
)
Add LICENSE and static/ back to package data.
Adding the static/ just in case there's ever any static
files later on, since it's a standard Flask folder.
Also, adding all files in templates/ so there's
no surprises later on in development.
|
"""\
Grip
----
Render local readme files before sending off to Github.
Grip is easy to set up
``````````````````````
::
$ pip install grip
$ cd myproject
$ grip
* Running on http://localhost:5000/
Links
`````
* `Website <http://github.com/joeyespo/grip>`_
"""
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
sys.exit(os.system('python setup.py sdist upload'))
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='grip',
version='1.2.0',
description='Render local readme files before sending off to Github.',
long_description=__doc__,
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/grip',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'': ['LICENSE'], 'grip': ['static/*', 'templates/*']},
install_requires=read('requirements.txt'),
zip_safe=False,
entry_points={'console_scripts': ['grip = grip.command:main']},
)
|
<commit_before>"""\
Grip
----
Render local readme files before sending off to Github.
Grip is easy to set up
``````````````````````
::
$ pip install grip
$ cd myproject
$ grip
* Running on http://localhost:5000/
Links
`````
* `Website <http://github.com/joeyespo/grip>`_
"""
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
sys.exit(os.system('python setup.py sdist upload'))
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='grip',
version='1.2.0',
description='Render local readme files before sending off to Github.',
long_description=__doc__,
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/grip',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'grip': ['templates/*.html']},
install_requires=read('requirements.txt'),
zip_safe=False,
entry_points={'console_scripts': ['grip = grip.command:main']},
)
<commit_msg>Add LICENSE and static/ back to package data.
Adding the static/ just in case there's ever any static
files later on, since it's a standard Flask folder.
Also, adding all files in templates/ so there's
no surprises later on in development.<commit_after>
|
"""\
Grip
----
Render local readme files before sending off to Github.
Grip is easy to set up
``````````````````````
::
$ pip install grip
$ cd myproject
$ grip
* Running on http://localhost:5000/
Links
`````
* `Website <http://github.com/joeyespo/grip>`_
"""
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
sys.exit(os.system('python setup.py sdist upload'))
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='grip',
version='1.2.0',
description='Render local readme files before sending off to Github.',
long_description=__doc__,
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/grip',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'': ['LICENSE'], 'grip': ['static/*', 'templates/*']},
install_requires=read('requirements.txt'),
zip_safe=False,
entry_points={'console_scripts': ['grip = grip.command:main']},
)
|
"""\
Grip
----
Render local readme files before sending off to Github.
Grip is easy to set up
``````````````````````
::
$ pip install grip
$ cd myproject
$ grip
* Running on http://localhost:5000/
Links
`````
* `Website <http://github.com/joeyespo/grip>`_
"""
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
sys.exit(os.system('python setup.py sdist upload'))
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='grip',
version='1.2.0',
description='Render local readme files before sending off to Github.',
long_description=__doc__,
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/grip',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'grip': ['templates/*.html']},
install_requires=read('requirements.txt'),
zip_safe=False,
entry_points={'console_scripts': ['grip = grip.command:main']},
)
Add LICENSE and static/ back to package data.
Adding the static/ just in case there's ever any static
files later on, since it's a standard Flask folder.
Also, adding all files in templates/ so there's
no surprises later on in development."""\
Grip
----
Render local readme files before sending off to Github.
Grip is easy to set up
``````````````````````
::
$ pip install grip
$ cd myproject
$ grip
* Running on http://localhost:5000/
Links
`````
* `Website <http://github.com/joeyespo/grip>`_
"""
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
sys.exit(os.system('python setup.py sdist upload'))
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='grip',
version='1.2.0',
description='Render local readme files before sending off to Github.',
long_description=__doc__,
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/grip',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'': ['LICENSE'], 'grip': ['static/*', 'templates/*']},
install_requires=read('requirements.txt'),
zip_safe=False,
entry_points={'console_scripts': ['grip = grip.command:main']},
)
|
<commit_before>"""\
Grip
----
Render local readme files before sending off to Github.
Grip is easy to set up
``````````````````````
::
$ pip install grip
$ cd myproject
$ grip
* Running on http://localhost:5000/
Links
`````
* `Website <http://github.com/joeyespo/grip>`_
"""
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
sys.exit(os.system('python setup.py sdist upload'))
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='grip',
version='1.2.0',
description='Render local readme files before sending off to Github.',
long_description=__doc__,
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/grip',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'grip': ['templates/*.html']},
install_requires=read('requirements.txt'),
zip_safe=False,
entry_points={'console_scripts': ['grip = grip.command:main']},
)
<commit_msg>Add LICENSE and static/ back to package data.
Adding the static/ just in case there's ever any static
files later on, since it's a standard Flask folder.
Also, adding all files in templates/ so there's
no surprises later on in development.<commit_after>"""\
Grip
----
Render local readme files before sending off to Github.
Grip is easy to set up
``````````````````````
::
$ pip install grip
$ cd myproject
$ grip
* Running on http://localhost:5000/
Links
`````
* `Website <http://github.com/joeyespo/grip>`_
"""
import os
import sys
from setuptools import setup, find_packages
if sys.argv[-1] == 'publish':
sys.exit(os.system('python setup.py sdist upload'))
def read(fname):
with open(os.path.join(os.path.dirname(__file__), fname)) as f:
return f.read()
setup(
name='grip',
version='1.2.0',
description='Render local readme files before sending off to Github.',
long_description=__doc__,
author='Joe Esposito',
author_email='joe@joeyespo.com',
url='http://github.com/joeyespo/grip',
license='MIT',
platforms='any',
packages=find_packages(),
package_data={'': ['LICENSE'], 'grip': ['static/*', 'templates/*']},
install_requires=read('requirements.txt'),
zip_safe=False,
entry_points={'console_scripts': ['grip = grip.command:main']},
)
|
9181085fb10b3c2281fbfbb69c9bb2b89a7990dd
|
setup.py
|
setup.py
|
__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': 'GeoLiteCity.dat'
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
|
__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': ['GeoLiteCity.dat']
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
|
Package data requires a list...
|
Package data requires a list...
|
Python
|
mit
|
pebble/pypkjs
|
__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': 'GeoLiteCity.dat'
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
Package data requires a list...
|
__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': ['GeoLiteCity.dat']
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
|
<commit_before>__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': 'GeoLiteCity.dat'
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
<commit_msg>Package data requires a list...<commit_after>
|
__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': ['GeoLiteCity.dat']
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
|
__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': 'GeoLiteCity.dat'
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
Package data requires a list...__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': ['GeoLiteCity.dat']
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
|
<commit_before>__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': 'GeoLiteCity.dat'
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
<commit_msg>Package data requires a list...<commit_after>__author__ = 'katharine'
import os
import sys
from setuptools import setup, find_packages
requirements_path = os.path.join(os.path.dirname(__file__), 'requirements.txt')
with open(requirements_path) as requirements_file:
requirements = [line.strip() for line in requirements_file.readlines()]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requirements,
package_data={
'javascript.navigator': ['GeoLiteCity.dat']
},
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=False)
|
fc48e79059fd1ad667aa30690cb136c027fd9314
|
setup.py
|
setup.py
|
import os
from setuptools import find_packages, setup
import sys
PY2 = sys.version_info[0] == 2
# Get version
with open(os.path.join('tilezilla', 'version.py')) as f:
for line in f:
if line.find('__version__') >= 0:
version = line.split("=")[1].strip()
version = version.strip('"').strip("'")
continue
install_requires = [
'click',
'click_plugins',
'numpy',
'GDAL',
'rasterio',
'shapely',
'clover',
'beautifulsoup4',
'lxml',
'pyyaml',
'jsonschema',
'sqlalchemy',
'sqlalchemy-utils',
]
if PY2:
install_requires += ['futures']
entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
shapes=tilezilla.cli.info:shapes
db=tilezilla.cli.db:db
'''
setup(
name='tilezilla',
version=version,
packages=find_packages(),
package_data={'tilezilla': ['data/*']},
include_package_data=True,
install_requires=install_requires,
entry_points=entry_points
)
|
import os
from setuptools import find_packages, setup
import sys
PY2 = sys.version_info[0] == 2
# Get version
with open(os.path.join('tilezilla', 'version.py')) as f:
for line in f:
if line.find('__version__') >= 0:
version = line.split("=")[1].strip()
version = version.strip('"').strip("'")
continue
install_requires = [
'click',
'click_plugins',
'numpy',
'GDAL',
'rasterio',
'shapely',
'clover',
'beautifulsoup4',
'lxml',
'pyyaml',
'jsonschema',
'sqlalchemy',
'sqlalchemy-utils',
]
if PY2:
install_requires += ['futures']
entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
db=tilezilla.cli.db:db
'''
setup(
name='tilezilla',
version=version,
packages=find_packages(),
package_data={'tilezilla': ['data/*']},
include_package_data=True,
install_requires=install_requires,
entry_points=entry_points
)
|
Remove shapes command until it's ready
|
Remove shapes command until it's ready
|
Python
|
bsd-3-clause
|
ceholden/landsat_tile,ceholden/landsat_tiles,ceholden/landsat_tile,ceholden/tilezilla,ceholden/landsat_tiles
|
import os
from setuptools import find_packages, setup
import sys
PY2 = sys.version_info[0] == 2
# Get version
with open(os.path.join('tilezilla', 'version.py')) as f:
for line in f:
if line.find('__version__') >= 0:
version = line.split("=")[1].strip()
version = version.strip('"').strip("'")
continue
install_requires = [
'click',
'click_plugins',
'numpy',
'GDAL',
'rasterio',
'shapely',
'clover',
'beautifulsoup4',
'lxml',
'pyyaml',
'jsonschema',
'sqlalchemy',
'sqlalchemy-utils',
]
if PY2:
install_requires += ['futures']
entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
shapes=tilezilla.cli.info:shapes
db=tilezilla.cli.db:db
'''
setup(
name='tilezilla',
version=version,
packages=find_packages(),
package_data={'tilezilla': ['data/*']},
include_package_data=True,
install_requires=install_requires,
entry_points=entry_points
)
Remove shapes command until it's ready
|
import os
from setuptools import find_packages, setup
import sys
PY2 = sys.version_info[0] == 2
# Get version
with open(os.path.join('tilezilla', 'version.py')) as f:
for line in f:
if line.find('__version__') >= 0:
version = line.split("=")[1].strip()
version = version.strip('"').strip("'")
continue
install_requires = [
'click',
'click_plugins',
'numpy',
'GDAL',
'rasterio',
'shapely',
'clover',
'beautifulsoup4',
'lxml',
'pyyaml',
'jsonschema',
'sqlalchemy',
'sqlalchemy-utils',
]
if PY2:
install_requires += ['futures']
entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
db=tilezilla.cli.db:db
'''
setup(
name='tilezilla',
version=version,
packages=find_packages(),
package_data={'tilezilla': ['data/*']},
include_package_data=True,
install_requires=install_requires,
entry_points=entry_points
)
|
<commit_before>import os
from setuptools import find_packages, setup
import sys
PY2 = sys.version_info[0] == 2
# Get version
with open(os.path.join('tilezilla', 'version.py')) as f:
for line in f:
if line.find('__version__') >= 0:
version = line.split("=")[1].strip()
version = version.strip('"').strip("'")
continue
install_requires = [
'click',
'click_plugins',
'numpy',
'GDAL',
'rasterio',
'shapely',
'clover',
'beautifulsoup4',
'lxml',
'pyyaml',
'jsonschema',
'sqlalchemy',
'sqlalchemy-utils',
]
if PY2:
install_requires += ['futures']
entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
shapes=tilezilla.cli.info:shapes
db=tilezilla.cli.db:db
'''
setup(
name='tilezilla',
version=version,
packages=find_packages(),
package_data={'tilezilla': ['data/*']},
include_package_data=True,
install_requires=install_requires,
entry_points=entry_points
)
<commit_msg>Remove shapes command until it's ready<commit_after>
|
import os
from setuptools import find_packages, setup
import sys
PY2 = sys.version_info[0] == 2
# Get version
with open(os.path.join('tilezilla', 'version.py')) as f:
for line in f:
if line.find('__version__') >= 0:
version = line.split("=")[1].strip()
version = version.strip('"').strip("'")
continue
install_requires = [
'click',
'click_plugins',
'numpy',
'GDAL',
'rasterio',
'shapely',
'clover',
'beautifulsoup4',
'lxml',
'pyyaml',
'jsonschema',
'sqlalchemy',
'sqlalchemy-utils',
]
if PY2:
install_requires += ['futures']
entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
db=tilezilla.cli.db:db
'''
setup(
name='tilezilla',
version=version,
packages=find_packages(),
package_data={'tilezilla': ['data/*']},
include_package_data=True,
install_requires=install_requires,
entry_points=entry_points
)
|
import os
from setuptools import find_packages, setup
import sys
PY2 = sys.version_info[0] == 2
# Get version
with open(os.path.join('tilezilla', 'version.py')) as f:
for line in f:
if line.find('__version__') >= 0:
version = line.split("=")[1].strip()
version = version.strip('"').strip("'")
continue
install_requires = [
'click',
'click_plugins',
'numpy',
'GDAL',
'rasterio',
'shapely',
'clover',
'beautifulsoup4',
'lxml',
'pyyaml',
'jsonschema',
'sqlalchemy',
'sqlalchemy-utils',
]
if PY2:
install_requires += ['futures']
entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
shapes=tilezilla.cli.info:shapes
db=tilezilla.cli.db:db
'''
setup(
name='tilezilla',
version=version,
packages=find_packages(),
package_data={'tilezilla': ['data/*']},
include_package_data=True,
install_requires=install_requires,
entry_points=entry_points
)
Remove shapes command until it's readyimport os
from setuptools import find_packages, setup
import sys
PY2 = sys.version_info[0] == 2
# Get version
with open(os.path.join('tilezilla', 'version.py')) as f:
for line in f:
if line.find('__version__') >= 0:
version = line.split("=")[1].strip()
version = version.strip('"').strip("'")
continue
install_requires = [
'click',
'click_plugins',
'numpy',
'GDAL',
'rasterio',
'shapely',
'clover',
'beautifulsoup4',
'lxml',
'pyyaml',
'jsonschema',
'sqlalchemy',
'sqlalchemy-utils',
]
if PY2:
install_requires += ['futures']
entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
db=tilezilla.cli.db:db
'''
setup(
name='tilezilla',
version=version,
packages=find_packages(),
package_data={'tilezilla': ['data/*']},
include_package_data=True,
install_requires=install_requires,
entry_points=entry_points
)
|
<commit_before>import os
from setuptools import find_packages, setup
import sys
PY2 = sys.version_info[0] == 2
# Get version
with open(os.path.join('tilezilla', 'version.py')) as f:
for line in f:
if line.find('__version__') >= 0:
version = line.split("=")[1].strip()
version = version.strip('"').strip("'")
continue
install_requires = [
'click',
'click_plugins',
'numpy',
'GDAL',
'rasterio',
'shapely',
'clover',
'beautifulsoup4',
'lxml',
'pyyaml',
'jsonschema',
'sqlalchemy',
'sqlalchemy-utils',
]
if PY2:
install_requires += ['futures']
entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
shapes=tilezilla.cli.info:shapes
db=tilezilla.cli.db:db
'''
setup(
name='tilezilla',
version=version,
packages=find_packages(),
package_data={'tilezilla': ['data/*']},
include_package_data=True,
install_requires=install_requires,
entry_points=entry_points
)
<commit_msg>Remove shapes command until it's ready<commit_after>import os
from setuptools import find_packages, setup
import sys
PY2 = sys.version_info[0] == 2
# Get version
with open(os.path.join('tilezilla', 'version.py')) as f:
for line in f:
if line.find('__version__') >= 0:
version = line.split("=")[1].strip()
version = version.strip('"').strip("'")
continue
install_requires = [
'click',
'click_plugins',
'numpy',
'GDAL',
'rasterio',
'shapely',
'clover',
'beautifulsoup4',
'lxml',
'pyyaml',
'jsonschema',
'sqlalchemy',
'sqlalchemy-utils',
]
if PY2:
install_requires += ['futures']
entry_points = '''
[console_scripts]
tilez=tilezilla.cli.main:cli
[tilez.commands]
ingest=tilezilla.cli.ingest:ingest
spew=tilezilla.cli.spew:spew
db=tilezilla.cli.db:db
'''
setup(
name='tilezilla',
version=version,
packages=find_packages(),
package_data={'tilezilla': ['data/*']},
include_package_data=True,
install_requires=install_requires,
entry_points=entry_points
)
|
b78a5649104380499eba6f13d8da4c04ef50a66d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='libmfrc522',
description='A library for communicating with the MFRC522 RFID module',
version='1.0',
url='https://github.com/fmfi-svt-gate/libmfrc522.py',
license='MIT',
packages=find_packages(),
install_requires=['crcmod'])
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='libmfrc522',
description='A library for communicating with the MFRC522 RFID module',
version='0.1',
url='https://github.com/fmfi-svt-gate/libmfrc522.py',
license='MIT',
packages=find_packages(),
install_requires=['crcmod'])
|
Change version to a more descriptive number
|
Change version to a more descriptive number
|
Python
|
mit
|
fmfi-svt-deadlock/libmfrc522.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='libmfrc522',
description='A library for communicating with the MFRC522 RFID module',
version='1.0',
url='https://github.com/fmfi-svt-gate/libmfrc522.py',
license='MIT',
packages=find_packages(),
install_requires=['crcmod'])
Change version to a more descriptive number
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='libmfrc522',
description='A library for communicating with the MFRC522 RFID module',
version='0.1',
url='https://github.com/fmfi-svt-gate/libmfrc522.py',
license='MIT',
packages=find_packages(),
install_requires=['crcmod'])
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='libmfrc522',
description='A library for communicating with the MFRC522 RFID module',
version='1.0',
url='https://github.com/fmfi-svt-gate/libmfrc522.py',
license='MIT',
packages=find_packages(),
install_requires=['crcmod'])
<commit_msg>Change version to a more descriptive number<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='libmfrc522',
description='A library for communicating with the MFRC522 RFID module',
version='0.1',
url='https://github.com/fmfi-svt-gate/libmfrc522.py',
license='MIT',
packages=find_packages(),
install_requires=['crcmod'])
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='libmfrc522',
description='A library for communicating with the MFRC522 RFID module',
version='1.0',
url='https://github.com/fmfi-svt-gate/libmfrc522.py',
license='MIT',
packages=find_packages(),
install_requires=['crcmod'])
Change version to a more descriptive number#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='libmfrc522',
description='A library for communicating with the MFRC522 RFID module',
version='0.1',
url='https://github.com/fmfi-svt-gate/libmfrc522.py',
license='MIT',
packages=find_packages(),
install_requires=['crcmod'])
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='libmfrc522',
description='A library for communicating with the MFRC522 RFID module',
version='1.0',
url='https://github.com/fmfi-svt-gate/libmfrc522.py',
license='MIT',
packages=find_packages(),
install_requires=['crcmod'])
<commit_msg>Change version to a more descriptive number<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(name='libmfrc522',
description='A library for communicating with the MFRC522 RFID module',
version='0.1',
url='https://github.com/fmfi-svt-gate/libmfrc522.py',
license='MIT',
packages=find_packages(),
install_requires=['crcmod'])
|
c8779edcb4078c799b7112625b5495f63a00e428
|
l10n_ro_partner_unique/models/res_partner.py
|
l10n_ro_partner_unique/models/res_partner.py
|
# Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
|
# Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id.id if self.company_id else False),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
|
Add vat unique per comapny
|
Add vat unique per comapny
|
Python
|
agpl-3.0
|
OCA/l10n-romania,OCA/l10n-romania
|
# Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
Add vat unique per comapny
|
# Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id.id if self.company_id else False),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
|
<commit_before># Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
<commit_msg>Add vat unique per comapny<commit_after>
|
# Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id.id if self.company_id else False),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
|
# Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
Add vat unique per comapny# Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id.id if self.company_id else False),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
|
<commit_before># Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
<commit_msg>Add vat unique per comapny<commit_after># Copyright (C) 2015 Forest and Biomass Romania
# Copyright (C) 2020 NextERP Romania
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import _, api, models
from odoo.exceptions import ValidationError
class ResPartner(models.Model):
_inherit = "res.partner"
@api.model
def _get_vat_nrc_constrain_domain(self):
domain = [
("company_id", "=", self.company_id.id if self.company_id else False),
("parent_id", "=", False),
("vat", "=", self.vat),
"|",
("nrc", "=", self.nrc),
("nrc", "=", False),
]
return domain
@api.constrains("vat", "nrc")
def _check_vat_nrc_unique(self):
for record in self:
if record.vat:
domain = record._get_vat_nrc_constrain_domain()
found = self.env["res.partner"].search(domain)
if len(found) > 1:
raise ValidationError(
_("The VAT and NRC pair (%s, %s) must be unique ids=%s!")
% (record.vat, record.nrc, found.ids)
)
|
158dc9e77a2f8ca6bd0a124b80f2dd10b5858731
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"amqp==1.0.6",
"anyjson==0.3.3",
"argparse==1.2.1",
"billiard==2.7.3.19",
"celery==3.0.13",
"kombu==2.5.4",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-WTF",
"requests==1.1.0",
"redis",
"lxml",
]
)
|
from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"amqp==1.0.6",
"anyjson==0.3.3",
"argparse==1.2.1",
"billiard==2.7.3.19",
"celery==3.0.13",
"kombu==2.5.4",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-WTF",
"requests==1.1.0",
"redis",
"lxml",
"beautifulsoup4"
]
)
|
Revert "remove beautifulsoup4 from requirements"
|
Revert "remove beautifulsoup4 from requirements"
This reverts commit e096c4d50a1fcc81a4f63b24d82f8f1dba9c493d.
Turns out we were actually using Beautiful Soup somewhere. Oops.
|
Python
|
bsd-3-clause
|
CottageLabs/OpenArticleGauge,CottageLabs/OpenArticleGauge,CottageLabs/OpenArticleGauge
|
from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"amqp==1.0.6",
"anyjson==0.3.3",
"argparse==1.2.1",
"billiard==2.7.3.19",
"celery==3.0.13",
"kombu==2.5.4",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-WTF",
"requests==1.1.0",
"redis",
"lxml",
]
)
Revert "remove beautifulsoup4 from requirements"
This reverts commit e096c4d50a1fcc81a4f63b24d82f8f1dba9c493d.
Turns out we were actually using Beautiful Soup somewhere. Oops.
|
from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"amqp==1.0.6",
"anyjson==0.3.3",
"argparse==1.2.1",
"billiard==2.7.3.19",
"celery==3.0.13",
"kombu==2.5.4",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-WTF",
"requests==1.1.0",
"redis",
"lxml",
"beautifulsoup4"
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"amqp==1.0.6",
"anyjson==0.3.3",
"argparse==1.2.1",
"billiard==2.7.3.19",
"celery==3.0.13",
"kombu==2.5.4",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-WTF",
"requests==1.1.0",
"redis",
"lxml",
]
)
<commit_msg>Revert "remove beautifulsoup4 from requirements"
This reverts commit e096c4d50a1fcc81a4f63b24d82f8f1dba9c493d.
Turns out we were actually using Beautiful Soup somewhere. Oops.<commit_after>
|
from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"amqp==1.0.6",
"anyjson==0.3.3",
"argparse==1.2.1",
"billiard==2.7.3.19",
"celery==3.0.13",
"kombu==2.5.4",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-WTF",
"requests==1.1.0",
"redis",
"lxml",
"beautifulsoup4"
]
)
|
from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"amqp==1.0.6",
"anyjson==0.3.3",
"argparse==1.2.1",
"billiard==2.7.3.19",
"celery==3.0.13",
"kombu==2.5.4",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-WTF",
"requests==1.1.0",
"redis",
"lxml",
]
)
Revert "remove beautifulsoup4 from requirements"
This reverts commit e096c4d50a1fcc81a4f63b24d82f8f1dba9c493d.
Turns out we were actually using Beautiful Soup somewhere. Oops.from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"amqp==1.0.6",
"anyjson==0.3.3",
"argparse==1.2.1",
"billiard==2.7.3.19",
"celery==3.0.13",
"kombu==2.5.4",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-WTF",
"requests==1.1.0",
"redis",
"lxml",
"beautifulsoup4"
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"amqp==1.0.6",
"anyjson==0.3.3",
"argparse==1.2.1",
"billiard==2.7.3.19",
"celery==3.0.13",
"kombu==2.5.4",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-WTF",
"requests==1.1.0",
"redis",
"lxml",
]
)
<commit_msg>Revert "remove beautifulsoup4 from requirements"
This reverts commit e096c4d50a1fcc81a4f63b24d82f8f1dba9c493d.
Turns out we were actually using Beautiful Soup somewhere. Oops.<commit_after>from setuptools import setup, find_packages
setup(
name = 'openarticlegauge',
version = '0.0.1',
packages = find_packages(),
install_requires = [
"Flask==0.9",
"Jinja2==2.6",
"Werkzeug==0.8.3",
"amqp==1.0.6",
"anyjson==0.3.3",
"argparse==1.2.1",
"billiard==2.7.3.19",
"celery==3.0.13",
"kombu==2.5.4",
"python-dateutil==1.5",
"wsgiref==0.1.2",
"Flask-WTF",
"requests==1.1.0",
"redis",
"lxml",
"beautifulsoup4"
]
)
|
aadd0dbfcd271120ddc02354d51ba111653d87f0
|
setup.py
|
setup.py
|
import io
import os
from setuptools import setup
version_txt = os.path.join(os.path.dirname(__file__), 'aghasher', 'version.txt')
with open(version_txt, 'r') as f:
version = f.read().strip()
setup(
author='Daniel Steinberg',
author_email='ds@dannyadam.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6'
],
description='An implementation of Anchor Graph Hashing',
install_requires=['numpy', 'scipy'],
keywords=['anchor-graph-hashing', 'hashing', 'locality-sensitive-hashing', 'machine-learning'],
license='MIT',
long_description=io.open('README.md', encoding='utf8').read(),
long_description_content_type='text/markdown',
name='aghasher',
package_data={'aghasher': ['version.txt']},
packages=['aghasher'],
url='https://github.com/dstein64/aghasher',
version=version,
)
|
import io
import os
from setuptools import setup
version_txt = os.path.join(os.path.dirname(__file__), 'aghasher', 'version.txt')
with open(version_txt, 'r') as f:
version = f.read().strip()
with io.open('README.md', encoding='utf8') as f:
long_description = f.read()
setup(
author='Daniel Steinberg',
author_email='ds@dannyadam.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6'
],
description='An implementation of Anchor Graph Hashing',
install_requires=['numpy', 'scipy'],
keywords=['anchor-graph-hashing', 'hashing', 'locality-sensitive-hashing', 'machine-learning'],
license='MIT',
long_description=long_description,
long_description_content_type='text/markdown',
name='aghasher',
package_data={'aghasher': ['version.txt']},
packages=['aghasher'],
url='https://github.com/dstein64/aghasher',
version=version,
)
|
Use a context manager for reading README.md.
|
Use a context manager for reading README.md.
|
Python
|
mit
|
dstein64/PyAnchorGraphHasher
|
import io
import os
from setuptools import setup
version_txt = os.path.join(os.path.dirname(__file__), 'aghasher', 'version.txt')
with open(version_txt, 'r') as f:
version = f.read().strip()
setup(
author='Daniel Steinberg',
author_email='ds@dannyadam.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6'
],
description='An implementation of Anchor Graph Hashing',
install_requires=['numpy', 'scipy'],
keywords=['anchor-graph-hashing', 'hashing', 'locality-sensitive-hashing', 'machine-learning'],
license='MIT',
long_description=io.open('README.md', encoding='utf8').read(),
long_description_content_type='text/markdown',
name='aghasher',
package_data={'aghasher': ['version.txt']},
packages=['aghasher'],
url='https://github.com/dstein64/aghasher',
version=version,
)
Use a context manager for reading README.md.
|
import io
import os
from setuptools import setup
version_txt = os.path.join(os.path.dirname(__file__), 'aghasher', 'version.txt')
with open(version_txt, 'r') as f:
version = f.read().strip()
with io.open('README.md', encoding='utf8') as f:
long_description = f.read()
setup(
author='Daniel Steinberg',
author_email='ds@dannyadam.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6'
],
description='An implementation of Anchor Graph Hashing',
install_requires=['numpy', 'scipy'],
keywords=['anchor-graph-hashing', 'hashing', 'locality-sensitive-hashing', 'machine-learning'],
license='MIT',
long_description=long_description,
long_description_content_type='text/markdown',
name='aghasher',
package_data={'aghasher': ['version.txt']},
packages=['aghasher'],
url='https://github.com/dstein64/aghasher',
version=version,
)
|
<commit_before>import io
import os
from setuptools import setup
version_txt = os.path.join(os.path.dirname(__file__), 'aghasher', 'version.txt')
with open(version_txt, 'r') as f:
version = f.read().strip()
setup(
author='Daniel Steinberg',
author_email='ds@dannyadam.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6'
],
description='An implementation of Anchor Graph Hashing',
install_requires=['numpy', 'scipy'],
keywords=['anchor-graph-hashing', 'hashing', 'locality-sensitive-hashing', 'machine-learning'],
license='MIT',
long_description=io.open('README.md', encoding='utf8').read(),
long_description_content_type='text/markdown',
name='aghasher',
package_data={'aghasher': ['version.txt']},
packages=['aghasher'],
url='https://github.com/dstein64/aghasher',
version=version,
)
<commit_msg>Use a context manager for reading README.md.<commit_after>
|
import io
import os
from setuptools import setup
version_txt = os.path.join(os.path.dirname(__file__), 'aghasher', 'version.txt')
with open(version_txt, 'r') as f:
version = f.read().strip()
with io.open('README.md', encoding='utf8') as f:
long_description = f.read()
setup(
author='Daniel Steinberg',
author_email='ds@dannyadam.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6'
],
description='An implementation of Anchor Graph Hashing',
install_requires=['numpy', 'scipy'],
keywords=['anchor-graph-hashing', 'hashing', 'locality-sensitive-hashing', 'machine-learning'],
license='MIT',
long_description=long_description,
long_description_content_type='text/markdown',
name='aghasher',
package_data={'aghasher': ['version.txt']},
packages=['aghasher'],
url='https://github.com/dstein64/aghasher',
version=version,
)
|
import io
import os
from setuptools import setup
version_txt = os.path.join(os.path.dirname(__file__), 'aghasher', 'version.txt')
with open(version_txt, 'r') as f:
version = f.read().strip()
setup(
author='Daniel Steinberg',
author_email='ds@dannyadam.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6'
],
description='An implementation of Anchor Graph Hashing',
install_requires=['numpy', 'scipy'],
keywords=['anchor-graph-hashing', 'hashing', 'locality-sensitive-hashing', 'machine-learning'],
license='MIT',
long_description=io.open('README.md', encoding='utf8').read(),
long_description_content_type='text/markdown',
name='aghasher',
package_data={'aghasher': ['version.txt']},
packages=['aghasher'],
url='https://github.com/dstein64/aghasher',
version=version,
)
Use a context manager for reading README.md.import io
import os
from setuptools import setup
version_txt = os.path.join(os.path.dirname(__file__), 'aghasher', 'version.txt')
with open(version_txt, 'r') as f:
version = f.read().strip()
with io.open('README.md', encoding='utf8') as f:
long_description = f.read()
setup(
author='Daniel Steinberg',
author_email='ds@dannyadam.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6'
],
description='An implementation of Anchor Graph Hashing',
install_requires=['numpy', 'scipy'],
keywords=['anchor-graph-hashing', 'hashing', 'locality-sensitive-hashing', 'machine-learning'],
license='MIT',
long_description=long_description,
long_description_content_type='text/markdown',
name='aghasher',
package_data={'aghasher': ['version.txt']},
packages=['aghasher'],
url='https://github.com/dstein64/aghasher',
version=version,
)
|
<commit_before>import io
import os
from setuptools import setup
version_txt = os.path.join(os.path.dirname(__file__), 'aghasher', 'version.txt')
with open(version_txt, 'r') as f:
version = f.read().strip()
setup(
author='Daniel Steinberg',
author_email='ds@dannyadam.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6'
],
description='An implementation of Anchor Graph Hashing',
install_requires=['numpy', 'scipy'],
keywords=['anchor-graph-hashing', 'hashing', 'locality-sensitive-hashing', 'machine-learning'],
license='MIT',
long_description=io.open('README.md', encoding='utf8').read(),
long_description_content_type='text/markdown',
name='aghasher',
package_data={'aghasher': ['version.txt']},
packages=['aghasher'],
url='https://github.com/dstein64/aghasher',
version=version,
)
<commit_msg>Use a context manager for reading README.md.<commit_after>import io
import os
from setuptools import setup
version_txt = os.path.join(os.path.dirname(__file__), 'aghasher', 'version.txt')
with open(version_txt, 'r') as f:
version = f.read().strip()
with io.open('README.md', encoding='utf8') as f:
long_description = f.read()
setup(
author='Daniel Steinberg',
author_email='ds@dannyadam.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Scientific/Engineering :: Information Analysis',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6'
],
description='An implementation of Anchor Graph Hashing',
install_requires=['numpy', 'scipy'],
keywords=['anchor-graph-hashing', 'hashing', 'locality-sensitive-hashing', 'machine-learning'],
license='MIT',
long_description=long_description,
long_description_content_type='text/markdown',
name='aghasher',
package_data={'aghasher': ['version.txt']},
packages=['aghasher'],
url='https://github.com/dstein64/aghasher',
version=version,
)
|
9e27d60ecd18a1b7bda8867cc1e064b5deec0370
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import sys, os
version = '0.1.4'
setup(name='pam',
version=version,
description="PAM interface using ctypes",
long_description="""\
An interface to the Pluggable Authentication Modules (PAM) library on linux, written in pure python (using ctypes)""",
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration :: Authentication/Directory"
],
keywords='',
author='Chris AtLee',
author_email='chris@atlee.ca',
url='http://atlee.ca/software/pam',
download_url = "http://atlee.ca/software/pam/dist/%s" % version,
license='MIT',
py_modules=["pam"],
zip_safe=True,
install_requires=[],
entry_points="""
# -*- Entry points: -*-
""",
)
|
from setuptools import setup, find_packages
import sys, os
version = '1.0'
setup(name='pam2',
version=version,
description="PAM interface using ctypes",
long_description="""\
An interface to the Pluggable Authentication Modules (PAM) library on linux, written in pure python (using ctypes)""",
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration :: Authentication/Directory"
],
keywords='',
author='Grzegorz Nosek',
author_email='root@localdomain.pl',
url='http://github.com/gnosek/python-pam',
download_url = "http://github.com/gnosek/python-pam/archive/%s.zip" % version,
license='MIT',
py_modules=["pam"],
zip_safe=True,
install_requires=[],
entry_points="""
# -*- Entry points: -*-
""",
)
|
Rename to pam2 as we're breaking the API, bump version
|
Rename to pam2 as we're breaking the API, bump version
|
Python
|
mit
|
rgbkrk/pamela
|
from setuptools import setup, find_packages
import sys, os
version = '0.1.4'
setup(name='pam',
version=version,
description="PAM interface using ctypes",
long_description="""\
An interface to the Pluggable Authentication Modules (PAM) library on linux, written in pure python (using ctypes)""",
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration :: Authentication/Directory"
],
keywords='',
author='Chris AtLee',
author_email='chris@atlee.ca',
url='http://atlee.ca/software/pam',
download_url = "http://atlee.ca/software/pam/dist/%s" % version,
license='MIT',
py_modules=["pam"],
zip_safe=True,
install_requires=[],
entry_points="""
# -*- Entry points: -*-
""",
)
Rename to pam2 as we're breaking the API, bump version
|
from setuptools import setup, find_packages
import sys, os
version = '1.0'
setup(name='pam2',
version=version,
description="PAM interface using ctypes",
long_description="""\
An interface to the Pluggable Authentication Modules (PAM) library on linux, written in pure python (using ctypes)""",
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration :: Authentication/Directory"
],
keywords='',
author='Grzegorz Nosek',
author_email='root@localdomain.pl',
url='http://github.com/gnosek/python-pam',
download_url = "http://github.com/gnosek/python-pam/archive/%s.zip" % version,
license='MIT',
py_modules=["pam"],
zip_safe=True,
install_requires=[],
entry_points="""
# -*- Entry points: -*-
""",
)
|
<commit_before>from setuptools import setup, find_packages
import sys, os
version = '0.1.4'
setup(name='pam',
version=version,
description="PAM interface using ctypes",
long_description="""\
An interface to the Pluggable Authentication Modules (PAM) library on linux, written in pure python (using ctypes)""",
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration :: Authentication/Directory"
],
keywords='',
author='Chris AtLee',
author_email='chris@atlee.ca',
url='http://atlee.ca/software/pam',
download_url = "http://atlee.ca/software/pam/dist/%s" % version,
license='MIT',
py_modules=["pam"],
zip_safe=True,
install_requires=[],
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Rename to pam2 as we're breaking the API, bump version<commit_after>
|
from setuptools import setup, find_packages
import sys, os
version = '1.0'
setup(name='pam2',
version=version,
description="PAM interface using ctypes",
long_description="""\
An interface to the Pluggable Authentication Modules (PAM) library on linux, written in pure python (using ctypes)""",
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration :: Authentication/Directory"
],
keywords='',
author='Grzegorz Nosek',
author_email='root@localdomain.pl',
url='http://github.com/gnosek/python-pam',
download_url = "http://github.com/gnosek/python-pam/archive/%s.zip" % version,
license='MIT',
py_modules=["pam"],
zip_safe=True,
install_requires=[],
entry_points="""
# -*- Entry points: -*-
""",
)
|
from setuptools import setup, find_packages
import sys, os
version = '0.1.4'
setup(name='pam',
version=version,
description="PAM interface using ctypes",
long_description="""\
An interface to the Pluggable Authentication Modules (PAM) library on linux, written in pure python (using ctypes)""",
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration :: Authentication/Directory"
],
keywords='',
author='Chris AtLee',
author_email='chris@atlee.ca',
url='http://atlee.ca/software/pam',
download_url = "http://atlee.ca/software/pam/dist/%s" % version,
license='MIT',
py_modules=["pam"],
zip_safe=True,
install_requires=[],
entry_points="""
# -*- Entry points: -*-
""",
)
Rename to pam2 as we're breaking the API, bump versionfrom setuptools import setup, find_packages
import sys, os
version = '1.0'
setup(name='pam2',
version=version,
description="PAM interface using ctypes",
long_description="""\
An interface to the Pluggable Authentication Modules (PAM) library on linux, written in pure python (using ctypes)""",
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration :: Authentication/Directory"
],
keywords='',
author='Grzegorz Nosek',
author_email='root@localdomain.pl',
url='http://github.com/gnosek/python-pam',
download_url = "http://github.com/gnosek/python-pam/archive/%s.zip" % version,
license='MIT',
py_modules=["pam"],
zip_safe=True,
install_requires=[],
entry_points="""
# -*- Entry points: -*-
""",
)
|
<commit_before>from setuptools import setup, find_packages
import sys, os
version = '0.1.4'
setup(name='pam',
version=version,
description="PAM interface using ctypes",
long_description="""\
An interface to the Pluggable Authentication Modules (PAM) library on linux, written in pure python (using ctypes)""",
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration :: Authentication/Directory"
],
keywords='',
author='Chris AtLee',
author_email='chris@atlee.ca',
url='http://atlee.ca/software/pam',
download_url = "http://atlee.ca/software/pam/dist/%s" % version,
license='MIT',
py_modules=["pam"],
zip_safe=True,
install_requires=[],
entry_points="""
# -*- Entry points: -*-
""",
)
<commit_msg>Rename to pam2 as we're breaking the API, bump version<commit_after>from setuptools import setup, find_packages
import sys, os
version = '1.0'
setup(name='pam2',
version=version,
description="PAM interface using ctypes",
long_description="""\
An interface to the Pluggable Authentication Modules (PAM) library on linux, written in pure python (using ctypes)""",
classifiers=["Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration :: Authentication/Directory"
],
keywords='',
author='Grzegorz Nosek',
author_email='root@localdomain.pl',
url='http://github.com/gnosek/python-pam',
download_url = "http://github.com/gnosek/python-pam/archive/%s.zip" % version,
license='MIT',
py_modules=["pam"],
zip_safe=True,
install_requires=[],
entry_points="""
# -*- Entry points: -*-
""",
)
|
d420f45071ab7ad049c412b456fe890d94cd47bd
|
setup.py
|
setup.py
|
"""
Flask-Mustache
--------------
Flask mustache integration.
Links
`````
* `development version
<http://github.com/ahri/flask-mustache>`_
"""
from setuptools import setup
setup(
name='Flask-Mustache',
version='0.1',
url='http://github.com/ahri/flask-mustache',
license='AGPLv3',
author='Adam Piper',
author_email='adam@ahri.net',
description='Mustache for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.8',
'pystache>=0.3.1',
],
tests_require=[
'Flask-Testing>=0.3',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
"""
Flask-Mustache
--------------
Flask mustache integration.
Links
`````
* `development version
<http://github.com/ahri/flask-mustache>`_
"""
from setuptools import setup
setup(
name='Flask-Mustache',
version='0.1.1',
url='http://github.com/ahri/flask-mustache',
license='AGPLv3',
author='Adam Piper',
author_email='adam@ahri.net',
description='Mustache for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.8',
],
tests_require=[
'Flask-Testing>=0.3',
'pystache>=0.3.1',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
Remove requirement on pystache (since nestache is now a viable alternative).
|
Remove requirement on pystache (since nestache is now a viable alternative).
|
Python
|
mit
|
ahri/flask-mustache
|
"""
Flask-Mustache
--------------
Flask mustache integration.
Links
`````
* `development version
<http://github.com/ahri/flask-mustache>`_
"""
from setuptools import setup
setup(
name='Flask-Mustache',
version='0.1',
url='http://github.com/ahri/flask-mustache',
license='AGPLv3',
author='Adam Piper',
author_email='adam@ahri.net',
description='Mustache for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.8',
'pystache>=0.3.1',
],
tests_require=[
'Flask-Testing>=0.3',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Remove requirement on pystache (since nestache is now a viable alternative).
|
"""
Flask-Mustache
--------------
Flask mustache integration.
Links
`````
* `development version
<http://github.com/ahri/flask-mustache>`_
"""
from setuptools import setup
setup(
name='Flask-Mustache',
version='0.1.1',
url='http://github.com/ahri/flask-mustache',
license='AGPLv3',
author='Adam Piper',
author_email='adam@ahri.net',
description='Mustache for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.8',
],
tests_require=[
'Flask-Testing>=0.3',
'pystache>=0.3.1',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>"""
Flask-Mustache
--------------
Flask mustache integration.
Links
`````
* `development version
<http://github.com/ahri/flask-mustache>`_
"""
from setuptools import setup
setup(
name='Flask-Mustache',
version='0.1',
url='http://github.com/ahri/flask-mustache',
license='AGPLv3',
author='Adam Piper',
author_email='adam@ahri.net',
description='Mustache for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.8',
'pystache>=0.3.1',
],
tests_require=[
'Flask-Testing>=0.3',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Remove requirement on pystache (since nestache is now a viable alternative).<commit_after>
|
"""
Flask-Mustache
--------------
Flask mustache integration.
Links
`````
* `development version
<http://github.com/ahri/flask-mustache>`_
"""
from setuptools import setup
setup(
name='Flask-Mustache',
version='0.1.1',
url='http://github.com/ahri/flask-mustache',
license='AGPLv3',
author='Adam Piper',
author_email='adam@ahri.net',
description='Mustache for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.8',
],
tests_require=[
'Flask-Testing>=0.3',
'pystache>=0.3.1',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
"""
Flask-Mustache
--------------
Flask mustache integration.
Links
`````
* `development version
<http://github.com/ahri/flask-mustache>`_
"""
from setuptools import setup
setup(
name='Flask-Mustache',
version='0.1',
url='http://github.com/ahri/flask-mustache',
license='AGPLv3',
author='Adam Piper',
author_email='adam@ahri.net',
description='Mustache for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.8',
'pystache>=0.3.1',
],
tests_require=[
'Flask-Testing>=0.3',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Remove requirement on pystache (since nestache is now a viable alternative)."""
Flask-Mustache
--------------
Flask mustache integration.
Links
`````
* `development version
<http://github.com/ahri/flask-mustache>`_
"""
from setuptools import setup
setup(
name='Flask-Mustache',
version='0.1.1',
url='http://github.com/ahri/flask-mustache',
license='AGPLv3',
author='Adam Piper',
author_email='adam@ahri.net',
description='Mustache for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.8',
],
tests_require=[
'Flask-Testing>=0.3',
'pystache>=0.3.1',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>"""
Flask-Mustache
--------------
Flask mustache integration.
Links
`````
* `development version
<http://github.com/ahri/flask-mustache>`_
"""
from setuptools import setup
setup(
name='Flask-Mustache',
version='0.1',
url='http://github.com/ahri/flask-mustache',
license='AGPLv3',
author='Adam Piper',
author_email='adam@ahri.net',
description='Mustache for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.8',
'pystache>=0.3.1',
],
tests_require=[
'Flask-Testing>=0.3',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Remove requirement on pystache (since nestache is now a viable alternative).<commit_after>"""
Flask-Mustache
--------------
Flask mustache integration.
Links
`````
* `development version
<http://github.com/ahri/flask-mustache>`_
"""
from setuptools import setup
setup(
name='Flask-Mustache',
version='0.1.1',
url='http://github.com/ahri/flask-mustache',
license='AGPLv3',
author='Adam Piper',
author_email='adam@ahri.net',
description='Mustache for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite="nose.collector",
zip_safe=False,
platforms='any',
install_requires=[
'Flask>=0.8',
],
tests_require=[
'Flask-Testing>=0.3',
'pystache>=0.3.1',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
693a7ecb45758676c3689f0294741acfc31e8e7c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile:
content = infile.read()
return content
setup(
name='marshmallow-polyfield',
version=5.0,
description='An unofficial extension to Marshmallow to allow for polymorphic fields',
long_description=read('README.rst'),
author='Matt Bachmann',
author_email='bachmann.matt@gmail.com',
url='https://github.com/Bachmann1234/marshmallow-polyfield',
packages=['marshmallow_polyfield', 'tests'],
license=read('LICENSE'),
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema'),
install_requires=['marshmallow', 'six'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile:
content = infile.read()
return content
setup(
name='marshmallow-polyfield',
version=5.0,
description='An unofficial extension to Marshmallow to allow for polymorphic fields',
long_description=read('README.rst'),
author='Matt Bachmann',
author_email='bachmann.matt@gmail.com',
url='https://github.com/Bachmann1234/marshmallow-polyfield',
packages=['marshmallow_polyfield', 'tests'],
license=read('LICENSE'),
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema'),
install_requires=['marshmallow>=3.0.0b10', 'six'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
|
Add version constraint for marshmallow
|
Add version constraint for marshmallow
|
Python
|
apache-2.0
|
Bachmann1234/marshmallow-polyfield
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile:
content = infile.read()
return content
setup(
name='marshmallow-polyfield',
version=5.0,
description='An unofficial extension to Marshmallow to allow for polymorphic fields',
long_description=read('README.rst'),
author='Matt Bachmann',
author_email='bachmann.matt@gmail.com',
url='https://github.com/Bachmann1234/marshmallow-polyfield',
packages=['marshmallow_polyfield', 'tests'],
license=read('LICENSE'),
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema'),
install_requires=['marshmallow', 'six'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
Add version constraint for marshmallow
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile:
content = infile.read()
return content
setup(
name='marshmallow-polyfield',
version=5.0,
description='An unofficial extension to Marshmallow to allow for polymorphic fields',
long_description=read('README.rst'),
author='Matt Bachmann',
author_email='bachmann.matt@gmail.com',
url='https://github.com/Bachmann1234/marshmallow-polyfield',
packages=['marshmallow_polyfield', 'tests'],
license=read('LICENSE'),
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema'),
install_requires=['marshmallow>=3.0.0b10', 'six'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile:
content = infile.read()
return content
setup(
name='marshmallow-polyfield',
version=5.0,
description='An unofficial extension to Marshmallow to allow for polymorphic fields',
long_description=read('README.rst'),
author='Matt Bachmann',
author_email='bachmann.matt@gmail.com',
url='https://github.com/Bachmann1234/marshmallow-polyfield',
packages=['marshmallow_polyfield', 'tests'],
license=read('LICENSE'),
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema'),
install_requires=['marshmallow', 'six'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
<commit_msg>Add version constraint for marshmallow<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile:
content = infile.read()
return content
setup(
name='marshmallow-polyfield',
version=5.0,
description='An unofficial extension to Marshmallow to allow for polymorphic fields',
long_description=read('README.rst'),
author='Matt Bachmann',
author_email='bachmann.matt@gmail.com',
url='https://github.com/Bachmann1234/marshmallow-polyfield',
packages=['marshmallow_polyfield', 'tests'],
license=read('LICENSE'),
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema'),
install_requires=['marshmallow>=3.0.0b10', 'six'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
|
#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile:
content = infile.read()
return content
setup(
name='marshmallow-polyfield',
version=5.0,
description='An unofficial extension to Marshmallow to allow for polymorphic fields',
long_description=read('README.rst'),
author='Matt Bachmann',
author_email='bachmann.matt@gmail.com',
url='https://github.com/Bachmann1234/marshmallow-polyfield',
packages=['marshmallow_polyfield', 'tests'],
license=read('LICENSE'),
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema'),
install_requires=['marshmallow', 'six'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
Add version constraint for marshmallow#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile:
content = infile.read()
return content
setup(
name='marshmallow-polyfield',
version=5.0,
description='An unofficial extension to Marshmallow to allow for polymorphic fields',
long_description=read('README.rst'),
author='Matt Bachmann',
author_email='bachmann.matt@gmail.com',
url='https://github.com/Bachmann1234/marshmallow-polyfield',
packages=['marshmallow_polyfield', 'tests'],
license=read('LICENSE'),
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema'),
install_requires=['marshmallow>=3.0.0b10', 'six'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile:
content = infile.read()
return content
setup(
name='marshmallow-polyfield',
version=5.0,
description='An unofficial extension to Marshmallow to allow for polymorphic fields',
long_description=read('README.rst'),
author='Matt Bachmann',
author_email='bachmann.matt@gmail.com',
url='https://github.com/Bachmann1234/marshmallow-polyfield',
packages=['marshmallow_polyfield', 'tests'],
license=read('LICENSE'),
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema'),
install_requires=['marshmallow', 'six'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
<commit_msg>Add version constraint for marshmallow<commit_after>#!/usr/bin/env python
import os
from setuptools import setup
def read(fname):
with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), fname), 'r') as infile:
content = infile.read()
return content
setup(
name='marshmallow-polyfield',
version=5.0,
description='An unofficial extension to Marshmallow to allow for polymorphic fields',
long_description=read('README.rst'),
author='Matt Bachmann',
author_email='bachmann.matt@gmail.com',
url='https://github.com/Bachmann1234/marshmallow-polyfield',
packages=['marshmallow_polyfield', 'tests'],
license=read('LICENSE'),
keywords=('serialization', 'rest', 'json', 'api', 'marshal',
'marshalling', 'deserialization', 'validation', 'schema'),
install_requires=['marshmallow>=3.0.0b10', 'six'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
)
|
4685b04c3913450ef34f7e4e848d6021097fed21
|
setup.py
|
setup.py
|
import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='marathon',
version="0.2.8",
description='Marathon Client Library',
long_description="""Python interface to the Marathon REST API.""",
author='Mike Babineau',
author_email='michael.babineau@gmail.com',
install_requires=[ 'requests>=2.0.0' ],
url='https://github.com/thefactory/marathon-python',
packages=['marathon', 'marathon.models'],
license='MIT',
platforms='Posix; MacOS X; Windows',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'],
**extra
)
|
import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='marathon',
version="0.2.8",
description='Marathon Client Library',
long_description="""Python interface to the Mesos Marathon REST API.""",
author='Mike Babineau',
author_email='michael.babineau@gmail.com',
install_requires=[ 'requests>=2.0.0' ],
url='https://github.com/thefactory/marathon-python',
packages=['marathon', 'marathon.models'],
license='MIT',
platforms='Posix; MacOS X; Windows',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'],
**extra
)
|
Clarify this is Mesos-related in PyPI description
|
Clarify this is Mesos-related in PyPI description
|
Python
|
mit
|
elyast/marathon-python,burakbostancioglu/marathon-python,thefactory/marathon-python,Rob-Johnson/marathon-python,mattrobenolt/marathon-python,mattrobenolt/marathon-python,Carles-Figuerola/marathon-python,drewrobb/marathon-python,Rob-Johnson/marathon-python,thefactory/marathon-python,mesosphere/marathon-python,Yelp/marathon-python,drewrobb/marathon-python,elyast/marathon-python,fengyehong/marathon-python,Carles-Figuerola/marathon-python,burakbostancioglu/marathon-python,vitan/marathon-python,fengyehong/marathon-python,mesosphere/marathon-python,Yelp/marathon-python
|
import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='marathon',
version="0.2.8",
description='Marathon Client Library',
long_description="""Python interface to the Marathon REST API.""",
author='Mike Babineau',
author_email='michael.babineau@gmail.com',
install_requires=[ 'requests>=2.0.0' ],
url='https://github.com/thefactory/marathon-python',
packages=['marathon', 'marathon.models'],
license='MIT',
platforms='Posix; MacOS X; Windows',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'],
**extra
)Clarify this is Mesos-related in PyPI description
|
import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='marathon',
version="0.2.8",
description='Marathon Client Library',
long_description="""Python interface to the Mesos Marathon REST API.""",
author='Mike Babineau',
author_email='michael.babineau@gmail.com',
install_requires=[ 'requests>=2.0.0' ],
url='https://github.com/thefactory/marathon-python',
packages=['marathon', 'marathon.models'],
license='MIT',
platforms='Posix; MacOS X; Windows',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'],
**extra
)
|
<commit_before>import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='marathon',
version="0.2.8",
description='Marathon Client Library',
long_description="""Python interface to the Marathon REST API.""",
author='Mike Babineau',
author_email='michael.babineau@gmail.com',
install_requires=[ 'requests>=2.0.0' ],
url='https://github.com/thefactory/marathon-python',
packages=['marathon', 'marathon.models'],
license='MIT',
platforms='Posix; MacOS X; Windows',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'],
**extra
)<commit_msg>Clarify this is Mesos-related in PyPI description<commit_after>
|
import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='marathon',
version="0.2.8",
description='Marathon Client Library',
long_description="""Python interface to the Mesos Marathon REST API.""",
author='Mike Babineau',
author_email='michael.babineau@gmail.com',
install_requires=[ 'requests>=2.0.0' ],
url='https://github.com/thefactory/marathon-python',
packages=['marathon', 'marathon.models'],
license='MIT',
platforms='Posix; MacOS X; Windows',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'],
**extra
)
|
import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='marathon',
version="0.2.8",
description='Marathon Client Library',
long_description="""Python interface to the Marathon REST API.""",
author='Mike Babineau',
author_email='michael.babineau@gmail.com',
install_requires=[ 'requests>=2.0.0' ],
url='https://github.com/thefactory/marathon-python',
packages=['marathon', 'marathon.models'],
license='MIT',
platforms='Posix; MacOS X; Windows',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'],
**extra
)Clarify this is Mesos-related in PyPI descriptionimport sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='marathon',
version="0.2.8",
description='Marathon Client Library',
long_description="""Python interface to the Mesos Marathon REST API.""",
author='Mike Babineau',
author_email='michael.babineau@gmail.com',
install_requires=[ 'requests>=2.0.0' ],
url='https://github.com/thefactory/marathon-python',
packages=['marathon', 'marathon.models'],
license='MIT',
platforms='Posix; MacOS X; Windows',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'],
**extra
)
|
<commit_before>import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='marathon',
version="0.2.8",
description='Marathon Client Library',
long_description="""Python interface to the Marathon REST API.""",
author='Mike Babineau',
author_email='michael.babineau@gmail.com',
install_requires=[ 'requests>=2.0.0' ],
url='https://github.com/thefactory/marathon-python',
packages=['marathon', 'marathon.models'],
license='MIT',
platforms='Posix; MacOS X; Windows',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'],
**extra
)<commit_msg>Clarify this is Mesos-related in PyPI description<commit_after>import sys
from setuptools import setup
extra = {}
if sys.version_info >= (3,):
extra['use_2to3'] = True
setup(name='marathon',
version="0.2.8",
description='Marathon Client Library',
long_description="""Python interface to the Mesos Marathon REST API.""",
author='Mike Babineau',
author_email='michael.babineau@gmail.com',
install_requires=[ 'requests>=2.0.0' ],
url='https://github.com/thefactory/marathon-python',
packages=['marathon', 'marathon.models'],
license='MIT',
platforms='Posix; MacOS X; Windows',
classifiers=['Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'],
**extra
)
|
5d8ca3498d794ab264377b1bc31f52fcd97210ba
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name="jack-select",
version="0.1",
url="https://github.com/SpotlightKid/jack-select",
author="Christopher Arndt",
author_email="chris@chrisarndt.de",
description="A systray app to set the JACK configuration from QJackCtl "
"presets via DBus",
keywords="JACK,systray,GTK",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=[
'PyGObject',
'dbus-python',
'pyxdg'
],
entry_points = {
'console_scripts': [
'jack-select = jackselect.jackselect:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: End users',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Environment :: X11 Applications :: GTK',
'Topic :: Multimedia :: Sound/Audio'
],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name="jack-select",
version="0.1b1",
url="https://github.com/SpotlightKid/jack-select",
author="Christopher Arndt",
author_email="chris@chrisarndt.de",
description="A systray app to set the JACK configuration from QJackCtl "
"presets via DBus",
keywords="JACK,systray,GTK,DBus,audio",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=[
'PyGObject',
'dbus-python',
'pyxdg'
],
entry_points = {
'console_scripts': [
'jack-select = jackselect.jackselect:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: End users',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Environment :: X11 Applications :: GTK',
'Topic :: Multimedia :: Sound/Audio'
],
)
|
Add b1 to version number; add more keywords
|
Add b1 to version number; add more keywords
|
Python
|
mit
|
SpotlightKid/jack-select,SpotlightKid/jack-select
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name="jack-select",
version="0.1",
url="https://github.com/SpotlightKid/jack-select",
author="Christopher Arndt",
author_email="chris@chrisarndt.de",
description="A systray app to set the JACK configuration from QJackCtl "
"presets via DBus",
keywords="JACK,systray,GTK",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=[
'PyGObject',
'dbus-python',
'pyxdg'
],
entry_points = {
'console_scripts': [
'jack-select = jackselect.jackselect:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: End users',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Environment :: X11 Applications :: GTK',
'Topic :: Multimedia :: Sound/Audio'
],
)
Add b1 to version number; add more keywords
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name="jack-select",
version="0.1b1",
url="https://github.com/SpotlightKid/jack-select",
author="Christopher Arndt",
author_email="chris@chrisarndt.de",
description="A systray app to set the JACK configuration from QJackCtl "
"presets via DBus",
keywords="JACK,systray,GTK,DBus,audio",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=[
'PyGObject',
'dbus-python',
'pyxdg'
],
entry_points = {
'console_scripts': [
'jack-select = jackselect.jackselect:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: End users',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Environment :: X11 Applications :: GTK',
'Topic :: Multimedia :: Sound/Audio'
],
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name="jack-select",
version="0.1",
url="https://github.com/SpotlightKid/jack-select",
author="Christopher Arndt",
author_email="chris@chrisarndt.de",
description="A systray app to set the JACK configuration from QJackCtl "
"presets via DBus",
keywords="JACK,systray,GTK",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=[
'PyGObject',
'dbus-python',
'pyxdg'
],
entry_points = {
'console_scripts': [
'jack-select = jackselect.jackselect:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: End users',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Environment :: X11 Applications :: GTK',
'Topic :: Multimedia :: Sound/Audio'
],
)
<commit_msg>Add b1 to version number; add more keywords<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name="jack-select",
version="0.1b1",
url="https://github.com/SpotlightKid/jack-select",
author="Christopher Arndt",
author_email="chris@chrisarndt.de",
description="A systray app to set the JACK configuration from QJackCtl "
"presets via DBus",
keywords="JACK,systray,GTK,DBus,audio",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=[
'PyGObject',
'dbus-python',
'pyxdg'
],
entry_points = {
'console_scripts': [
'jack-select = jackselect.jackselect:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: End users',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Environment :: X11 Applications :: GTK',
'Topic :: Multimedia :: Sound/Audio'
],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name="jack-select",
version="0.1",
url="https://github.com/SpotlightKid/jack-select",
author="Christopher Arndt",
author_email="chris@chrisarndt.de",
description="A systray app to set the JACK configuration from QJackCtl "
"presets via DBus",
keywords="JACK,systray,GTK",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=[
'PyGObject',
'dbus-python',
'pyxdg'
],
entry_points = {
'console_scripts': [
'jack-select = jackselect.jackselect:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: End users',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Environment :: X11 Applications :: GTK',
'Topic :: Multimedia :: Sound/Audio'
],
)
Add b1 to version number; add more keywords#!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name="jack-select",
version="0.1b1",
url="https://github.com/SpotlightKid/jack-select",
author="Christopher Arndt",
author_email="chris@chrisarndt.de",
description="A systray app to set the JACK configuration from QJackCtl "
"presets via DBus",
keywords="JACK,systray,GTK,DBus,audio",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=[
'PyGObject',
'dbus-python',
'pyxdg'
],
entry_points = {
'console_scripts': [
'jack-select = jackselect.jackselect:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: End users',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Environment :: X11 Applications :: GTK',
'Topic :: Multimedia :: Sound/Audio'
],
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name="jack-select",
version="0.1",
url="https://github.com/SpotlightKid/jack-select",
author="Christopher Arndt",
author_email="chris@chrisarndt.de",
description="A systray app to set the JACK configuration from QJackCtl "
"presets via DBus",
keywords="JACK,systray,GTK",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=[
'PyGObject',
'dbus-python',
'pyxdg'
],
entry_points = {
'console_scripts': [
'jack-select = jackselect.jackselect:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: End users',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Environment :: X11 Applications :: GTK',
'Topic :: Multimedia :: Sound/Audio'
],
)
<commit_msg>Add b1 to version number; add more keywords<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name="jack-select",
version="0.1b1",
url="https://github.com/SpotlightKid/jack-select",
author="Christopher Arndt",
author_email="chris@chrisarndt.de",
description="A systray app to set the JACK configuration from QJackCtl "
"presets via DBus",
keywords="JACK,systray,GTK,DBus,audio",
packages=setuptools.find_packages(),
include_package_data=True,
install_requires=[
'PyGObject',
'dbus-python',
'pyxdg'
],
entry_points = {
'console_scripts': [
'jack-select = jackselect.jackselect:main',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: End users',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Environment :: X11 Applications :: GTK',
'Topic :: Multimedia :: Sound/Audio'
],
)
|
548cfea821bf1b0b92ce09c54405554d264b5395
|
tests/integration/session/test_timeout.py
|
tests/integration/session/test_timeout.py
|
import time
from app import settings
from tests.integration.integration_test_case import IntegrationTestCase
class TestTimeout(IntegrationTestCase):
def setUp(self):
settings.EQ_SESSION_TIMEOUT_SECONDS = 1
settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
super().setUp()
def test_timeout_continue_returns_200(self):
self.launchSurvey('test', 'timeout')
self.get('/timeout-continue')
self.assertStatusOK()
def test_when_session_times_out_server_side_401_is_returned(self):
self.launchSurvey('test', 'timeout')
time.sleep(2)
self.get(self.last_url)
self.assertStatusUnauthorised()
def test_schema_defined_timeout_is_used(self):
self.launchSurvey('test', 'timeout')
self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')
def test_schema_defined_timeout_cant_be_higher_than_server(self):
self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
self.launchSurvey('test', 'timeout')
self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
|
import time

from app import settings
from tests.integration.integration_test_case import IntegrationTestCase


class TestTimeout(IntegrationTestCase):
    """Session-timeout integration tests; the timeout is shrunk to 1 second."""

    def setUp(self):
        settings.EQ_SESSION_TIMEOUT_SECONDS = 1
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
        super().setUp()

    def tearDown(self):
        # Put the module-level settings back to their defaults so the short
        # timeout does not leak into subsequent test classes.
        settings.EQ_SESSION_TIMEOUT_SECONDS = 45 * 60
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 30
        super().tearDown()

    def _start_survey(self):
        # Shared launcher for the timeout test schema.
        self.launchSurvey('test', 'timeout')

    def test_timeout_continue_returns_200(self):
        self._start_survey()
        self.get('/timeout-continue')
        self.assertStatusOK()

    def test_when_session_times_out_server_side_401_is_returned(self):
        self._start_survey()
        time.sleep(2)
        self.get(self.last_url)
        self.assertStatusUnauthorised()

    def test_schema_defined_timeout_is_used(self):
        self._start_survey()
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')

    def test_schema_defined_timeout_cant_be_higher_than_server(self):
        self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
        self._start_survey()
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
|
Fix CSRF missing errors that happen occasionally in tests
|
Fix CSRF missing errors that happen occasionally in tests
|
Python
|
mit
|
ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner
|
import time

from app import settings
from tests.integration.integration_test_case import IntegrationTestCase


class TestTimeout(IntegrationTestCase):
    """Integration tests for session-timeout behaviour (1 second timeout)."""

    def setUp(self):
        # Remember the module-level defaults so tearDown can restore them;
        # without this the shortened timeout leaks into later test classes.
        self._previous_timeout = settings.EQ_SESSION_TIMEOUT_SECONDS
        self._previous_grace = settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS
        settings.EQ_SESSION_TIMEOUT_SECONDS = 1
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
        super().setUp()

    def tearDown(self):
        # Restore the globals mutated in setUp.
        settings.EQ_SESSION_TIMEOUT_SECONDS = self._previous_timeout
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = self._previous_grace
        super().tearDown()

    def test_timeout_continue_returns_200(self):
        self.launchSurvey('test', 'timeout')
        self.get('/timeout-continue')
        self.assertStatusOK()

    def test_when_session_times_out_server_side_401_is_returned(self):
        self.launchSurvey('test', 'timeout')
        time.sleep(2)  # outlive the 1 second session timeout
        self.get(self.last_url)
        self.assertStatusUnauthorised()

    def test_schema_defined_timeout_is_used(self):
        self.launchSurvey('test', 'timeout')
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')

    def test_schema_defined_timeout_cant_be_higher_than_server(self):
        self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
        self.launchSurvey('test', 'timeout')
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
Fix CSRF missing errors that happen occasionally in tests
|
import time

from app import settings
from tests.integration.integration_test_case import IntegrationTestCase


class TestTimeout(IntegrationTestCase):
    """Session-timeout integration tests; the timeout is shrunk to 1 second."""

    def setUp(self):
        settings.EQ_SESSION_TIMEOUT_SECONDS = 1
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
        super().setUp()

    def tearDown(self):
        # Put the module-level settings back to their defaults so the short
        # timeout does not leak into subsequent test classes.
        settings.EQ_SESSION_TIMEOUT_SECONDS = 45 * 60
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 30
        super().tearDown()

    def _start_survey(self):
        # Shared launcher for the timeout test schema.
        self.launchSurvey('test', 'timeout')

    def test_timeout_continue_returns_200(self):
        self._start_survey()
        self.get('/timeout-continue')
        self.assertStatusOK()

    def test_when_session_times_out_server_side_401_is_returned(self):
        self._start_survey()
        time.sleep(2)
        self.get(self.last_url)
        self.assertStatusUnauthorised()

    def test_schema_defined_timeout_is_used(self):
        self._start_survey()
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')

    def test_schema_defined_timeout_cant_be_higher_than_server(self):
        self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
        self._start_survey()
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
|
<commit_before>import time
from app import settings
from tests.integration.integration_test_case import IntegrationTestCase


class TestTimeout(IntegrationTestCase):
    """Integration tests for session-timeout behaviour (1 second timeout)."""

    def setUp(self):
        # Remember the module-level defaults so tearDown can restore them;
        # without this the shortened timeout leaks into later test classes.
        self._previous_timeout = settings.EQ_SESSION_TIMEOUT_SECONDS
        self._previous_grace = settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS
        settings.EQ_SESSION_TIMEOUT_SECONDS = 1
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
        super().setUp()

    def tearDown(self):
        # Restore the globals mutated in setUp.
        settings.EQ_SESSION_TIMEOUT_SECONDS = self._previous_timeout
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = self._previous_grace
        super().tearDown()

    def test_timeout_continue_returns_200(self):
        self.launchSurvey('test', 'timeout')
        self.get('/timeout-continue')
        self.assertStatusOK()

    def test_when_session_times_out_server_side_401_is_returned(self):
        self.launchSurvey('test', 'timeout')
        time.sleep(2)  # outlive the 1 second session timeout
        self.get(self.last_url)
        self.assertStatusUnauthorised()

    def test_schema_defined_timeout_is_used(self):
        self.launchSurvey('test', 'timeout')
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')

    def test_schema_defined_timeout_cant_be_higher_than_server(self):
        self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
        self.launchSurvey('test', 'timeout')
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
<commit_msg>Fix CSRF missing errors that happen occasionally in tests<commit_after>
|
import time

from app import settings
from tests.integration.integration_test_case import IntegrationTestCase


class TestTimeout(IntegrationTestCase):
    """Session-timeout integration tests; the timeout is shrunk to 1 second."""

    def setUp(self):
        settings.EQ_SESSION_TIMEOUT_SECONDS = 1
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
        super().setUp()

    def tearDown(self):
        # Put the module-level settings back to their defaults so the short
        # timeout does not leak into subsequent test classes.
        settings.EQ_SESSION_TIMEOUT_SECONDS = 45 * 60
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 30
        super().tearDown()

    def _start_survey(self):
        # Shared launcher for the timeout test schema.
        self.launchSurvey('test', 'timeout')

    def test_timeout_continue_returns_200(self):
        self._start_survey()
        self.get('/timeout-continue')
        self.assertStatusOK()

    def test_when_session_times_out_server_side_401_is_returned(self):
        self._start_survey()
        time.sleep(2)
        self.get(self.last_url)
        self.assertStatusUnauthorised()

    def test_schema_defined_timeout_is_used(self):
        self._start_survey()
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')

    def test_schema_defined_timeout_cant_be_higher_than_server(self):
        self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
        self._start_survey()
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
|
import time

from app import settings
from tests.integration.integration_test_case import IntegrationTestCase


class TestTimeout(IntegrationTestCase):
    """Integration tests for session-timeout behaviour (1 second timeout)."""

    def setUp(self):
        # Remember the module-level defaults so tearDown can restore them;
        # without this the shortened timeout leaks into later test classes.
        self._previous_timeout = settings.EQ_SESSION_TIMEOUT_SECONDS
        self._previous_grace = settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS
        settings.EQ_SESSION_TIMEOUT_SECONDS = 1
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
        super().setUp()

    def tearDown(self):
        # Restore the globals mutated in setUp.
        settings.EQ_SESSION_TIMEOUT_SECONDS = self._previous_timeout
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = self._previous_grace
        super().tearDown()

    def test_timeout_continue_returns_200(self):
        self.launchSurvey('test', 'timeout')
        self.get('/timeout-continue')
        self.assertStatusOK()

    def test_when_session_times_out_server_side_401_is_returned(self):
        self.launchSurvey('test', 'timeout')
        time.sleep(2)  # outlive the 1 second session timeout
        self.get(self.last_url)
        self.assertStatusUnauthorised()

    def test_schema_defined_timeout_is_used(self):
        self.launchSurvey('test', 'timeout')
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')

    def test_schema_defined_timeout_cant_be_higher_than_server(self):
        self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
        self.launchSurvey('test', 'timeout')
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
Fix CSRF missing errors that happen occasionally in testsimport time
from app import settings
from tests.integration.integration_test_case import IntegrationTestCase


class TestTimeout(IntegrationTestCase):
    """Session-timeout integration tests; the timeout is shrunk to 1 second."""

    def setUp(self):
        settings.EQ_SESSION_TIMEOUT_SECONDS = 1
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
        super().setUp()

    def tearDown(self):
        # Put the module-level settings back to their defaults so the short
        # timeout does not leak into subsequent test classes.
        settings.EQ_SESSION_TIMEOUT_SECONDS = 45 * 60
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 30
        super().tearDown()

    def _start_survey(self):
        # Shared launcher for the timeout test schema.
        self.launchSurvey('test', 'timeout')

    def test_timeout_continue_returns_200(self):
        self._start_survey()
        self.get('/timeout-continue')
        self.assertStatusOK()

    def test_when_session_times_out_server_side_401_is_returned(self):
        self._start_survey()
        time.sleep(2)
        self.get(self.last_url)
        self.assertStatusUnauthorised()

    def test_schema_defined_timeout_is_used(self):
        self._start_survey()
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')

    def test_schema_defined_timeout_cant_be_higher_than_server(self):
        self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
        self._start_survey()
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
|
<commit_before>import time
from app import settings
from tests.integration.integration_test_case import IntegrationTestCase


class TestTimeout(IntegrationTestCase):
    """Integration tests for session-timeout behaviour (1 second timeout)."""

    def setUp(self):
        # Remember the module-level defaults so tearDown can restore them;
        # without this the shortened timeout leaks into later test classes.
        self._previous_timeout = settings.EQ_SESSION_TIMEOUT_SECONDS
        self._previous_grace = settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS
        settings.EQ_SESSION_TIMEOUT_SECONDS = 1
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
        super().setUp()

    def tearDown(self):
        # Restore the globals mutated in setUp.
        settings.EQ_SESSION_TIMEOUT_SECONDS = self._previous_timeout
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = self._previous_grace
        super().tearDown()

    def test_timeout_continue_returns_200(self):
        self.launchSurvey('test', 'timeout')
        self.get('/timeout-continue')
        self.assertStatusOK()

    def test_when_session_times_out_server_side_401_is_returned(self):
        self.launchSurvey('test', 'timeout')
        time.sleep(2)  # outlive the 1 second session timeout
        self.get(self.last_url)
        self.assertStatusUnauthorised()

    def test_schema_defined_timeout_is_used(self):
        self.launchSurvey('test', 'timeout')
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')

    def test_schema_defined_timeout_cant_be_higher_than_server(self):
        self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
        self.launchSurvey('test', 'timeout')
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
<commit_msg>Fix CSRF missing errors that happen occasionally in tests<commit_after>import time
from app import settings
from tests.integration.integration_test_case import IntegrationTestCase


class TestTimeout(IntegrationTestCase):
    """Session-timeout integration tests; the timeout is shrunk to 1 second."""

    def setUp(self):
        settings.EQ_SESSION_TIMEOUT_SECONDS = 1
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 0
        super().setUp()

    def tearDown(self):
        # Put the module-level settings back to their defaults so the short
        # timeout does not leak into subsequent test classes.
        settings.EQ_SESSION_TIMEOUT_SECONDS = 45 * 60
        settings.EQ_SESSION_TIMEOUT_GRACE_PERIOD_SECONDS = 30
        super().tearDown()

    def _start_survey(self):
        # Shared launcher for the timeout test schema.
        self.launchSurvey('test', 'timeout')

    def test_timeout_continue_returns_200(self):
        self._start_survey()
        self.get('/timeout-continue')
        self.assertStatusOK()

    def test_when_session_times_out_server_side_401_is_returned(self):
        self._start_survey()
        time.sleep(2)
        self.get(self.last_url)
        self.assertStatusUnauthorised()

    def test_schema_defined_timeout_is_used(self):
        self._start_survey()
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 1')

    def test_schema_defined_timeout_cant_be_higher_than_server(self):
        self._application.config['EQ_SESSION_TIMEOUT_SECONDS'] = 10
        self._start_survey()
        self.assertInPage('window.__EQ_SESSION_TIMEOUT__ = 6')
a642a4f98016e2fa796c69bd74c36008601e4e5f
|
tools/wcloud/wcloud/deploymentsettings.py
|
tools/wcloud/wcloud/deploymentsettings.py
|
from weblab.admin.script import Creation

# Filename of the generated Apache configuration.
APACHE_CONF_NAME = 'apache.conf'

# Lowest port number assigned to deployed WebLab instances.
MIN_PORT = 10000

# Marker for values that must be supplied per deployment.
_PLACEHOLDER = 'CHANGE_ME'

# Baseline options handed to the WebLab instance creation script.
DEFAULT_DEPLOYMENT_SETTINGS = {
    Creation.COORD_ENGINE: 'redis',
    Creation.COORD_REDIS_DB: 0,
    Creation.COORD_REDIS_PORT: 6379,
    Creation.DB_ENGINE: 'mysql',
    Creation.ADMIN_USER: _PLACEHOLDER,
    Creation.ADMIN_NAME: _PLACEHOLDER,
    Creation.ADMIN_PASSWORD: _PLACEHOLDER,
    Creation.ADMIN_MAIL: _PLACEHOLDER,
    Creation.START_PORTS: _PLACEHOLDER,
    Creation.SYSTEM_IDENTIFIER: _PLACEHOLDER,
    Creation.SERVER_HOST: 'weblab.deusto.es',
    Creation.ENTITY_LINK: 'http://www.deusto.es/',
    Creation.CORES: 3,
    Creation.ADD_FEDERATED_LOGIC: True,
    Creation.ADD_FEDERATED_VISIR: True,
    Creation.ADD_FEDERATED_SUBMARINE: True,
}
|
from weblab.admin.script import Creation

# Filename of the generated Apache configuration.
APACHE_CONF_NAME = 'apache.conf'

# Lowest port number assigned to deployed WebLab instances.
MIN_PORT = 14000

# Marker for values that must be supplied per deployment.
_PLACEHOLDER = 'CHANGE_ME'

# Baseline options handed to the WebLab instance creation script.
DEFAULT_DEPLOYMENT_SETTINGS = {
    Creation.COORD_ENGINE: 'redis',
    Creation.COORD_REDIS_DB: 0,
    Creation.COORD_REDIS_PORT: 6379,
    Creation.DB_ENGINE: 'mysql',
    Creation.ADMIN_USER: _PLACEHOLDER,
    Creation.ADMIN_NAME: _PLACEHOLDER,
    Creation.ADMIN_PASSWORD: _PLACEHOLDER,
    Creation.ADMIN_MAIL: _PLACEHOLDER,
    Creation.START_PORTS: _PLACEHOLDER,
    Creation.SYSTEM_IDENTIFIER: _PLACEHOLDER,
    Creation.SERVER_HOST: 'weblab.deusto.es',
    Creation.ENTITY_LINK: 'http://www.deusto.es/',
    Creation.CORES: 3,
    Creation.ADD_FEDERATED_LOGIC: True,
    Creation.ADD_FEDERATED_VISIR: True,
    Creation.ADD_FEDERATED_SUBMARINE: True,
}
|
Establish different settings for minimal port
|
Establish different settings for minimal port
|
Python
|
bsd-2-clause
|
weblabdeusto/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto
|
from weblab.admin.script import Creation

# Filename of the generated Apache configuration.
APACHE_CONF_NAME = 'apache.conf'

# Lowest port number assigned to deployed WebLab instances.
MIN_PORT = 10000

# Marker for values that must be supplied per deployment.
_PLACEHOLDER = 'CHANGE_ME'

# Baseline options handed to the WebLab instance creation script.
DEFAULT_DEPLOYMENT_SETTINGS = {
    Creation.COORD_ENGINE: 'redis',
    Creation.COORD_REDIS_DB: 0,
    Creation.COORD_REDIS_PORT: 6379,
    Creation.DB_ENGINE: 'mysql',
    Creation.ADMIN_USER: _PLACEHOLDER,
    Creation.ADMIN_NAME: _PLACEHOLDER,
    Creation.ADMIN_PASSWORD: _PLACEHOLDER,
    Creation.ADMIN_MAIL: _PLACEHOLDER,
    Creation.START_PORTS: _PLACEHOLDER,
    Creation.SYSTEM_IDENTIFIER: _PLACEHOLDER,
    Creation.SERVER_HOST: 'weblab.deusto.es',
    Creation.ENTITY_LINK: 'http://www.deusto.es/',
    Creation.CORES: 3,
    Creation.ADD_FEDERATED_LOGIC: True,
    Creation.ADD_FEDERATED_VISIR: True,
    Creation.ADD_FEDERATED_SUBMARINE: True,
}
Establish different settings for minimal port
|
from weblab.admin.script import Creation

# Filename of the generated Apache configuration.
APACHE_CONF_NAME = 'apache.conf'

# Lowest port number assigned to deployed WebLab instances.
MIN_PORT = 14000

# Marker for values that must be supplied per deployment.
_PLACEHOLDER = 'CHANGE_ME'

# Baseline options handed to the WebLab instance creation script.
DEFAULT_DEPLOYMENT_SETTINGS = {
    Creation.COORD_ENGINE: 'redis',
    Creation.COORD_REDIS_DB: 0,
    Creation.COORD_REDIS_PORT: 6379,
    Creation.DB_ENGINE: 'mysql',
    Creation.ADMIN_USER: _PLACEHOLDER,
    Creation.ADMIN_NAME: _PLACEHOLDER,
    Creation.ADMIN_PASSWORD: _PLACEHOLDER,
    Creation.ADMIN_MAIL: _PLACEHOLDER,
    Creation.START_PORTS: _PLACEHOLDER,
    Creation.SYSTEM_IDENTIFIER: _PLACEHOLDER,
    Creation.SERVER_HOST: 'weblab.deusto.es',
    Creation.ENTITY_LINK: 'http://www.deusto.es/',
    Creation.CORES: 3,
    Creation.ADD_FEDERATED_LOGIC: True,
    Creation.ADD_FEDERATED_VISIR: True,
    Creation.ADD_FEDERATED_SUBMARINE: True,
}
|
<commit_before>from weblab.admin.script import Creation
# Filename of the generated Apache configuration.
APACHE_CONF_NAME = 'apache.conf'

# Lowest port number assigned to deployed WebLab instances.
MIN_PORT = 10000

# Marker for values that must be supplied per deployment.
_PLACEHOLDER = 'CHANGE_ME'

# Baseline options handed to the WebLab instance creation script.
DEFAULT_DEPLOYMENT_SETTINGS = {
    Creation.COORD_ENGINE: 'redis',
    Creation.COORD_REDIS_DB: 0,
    Creation.COORD_REDIS_PORT: 6379,
    Creation.DB_ENGINE: 'mysql',
    Creation.ADMIN_USER: _PLACEHOLDER,
    Creation.ADMIN_NAME: _PLACEHOLDER,
    Creation.ADMIN_PASSWORD: _PLACEHOLDER,
    Creation.ADMIN_MAIL: _PLACEHOLDER,
    Creation.START_PORTS: _PLACEHOLDER,
    Creation.SYSTEM_IDENTIFIER: _PLACEHOLDER,
    Creation.SERVER_HOST: 'weblab.deusto.es',
    Creation.ENTITY_LINK: 'http://www.deusto.es/',
    Creation.CORES: 3,
    Creation.ADD_FEDERATED_LOGIC: True,
    Creation.ADD_FEDERATED_VISIR: True,
    Creation.ADD_FEDERATED_SUBMARINE: True,
}
<commit_msg>Establish different settings for minimal port<commit_after>
|
from weblab.admin.script import Creation

# Filename of the generated Apache configuration.
APACHE_CONF_NAME = 'apache.conf'

# Lowest port number assigned to deployed WebLab instances.
MIN_PORT = 14000

# Marker for values that must be supplied per deployment.
_PLACEHOLDER = 'CHANGE_ME'

# Baseline options handed to the WebLab instance creation script.
DEFAULT_DEPLOYMENT_SETTINGS = {
    Creation.COORD_ENGINE: 'redis',
    Creation.COORD_REDIS_DB: 0,
    Creation.COORD_REDIS_PORT: 6379,
    Creation.DB_ENGINE: 'mysql',
    Creation.ADMIN_USER: _PLACEHOLDER,
    Creation.ADMIN_NAME: _PLACEHOLDER,
    Creation.ADMIN_PASSWORD: _PLACEHOLDER,
    Creation.ADMIN_MAIL: _PLACEHOLDER,
    Creation.START_PORTS: _PLACEHOLDER,
    Creation.SYSTEM_IDENTIFIER: _PLACEHOLDER,
    Creation.SERVER_HOST: 'weblab.deusto.es',
    Creation.ENTITY_LINK: 'http://www.deusto.es/',
    Creation.CORES: 3,
    Creation.ADD_FEDERATED_LOGIC: True,
    Creation.ADD_FEDERATED_VISIR: True,
    Creation.ADD_FEDERATED_SUBMARINE: True,
}
|
from weblab.admin.script import Creation

# Filename of the generated Apache configuration.
APACHE_CONF_NAME = 'apache.conf'

# Lowest port number assigned to deployed WebLab instances.
MIN_PORT = 10000

# Marker for values that must be supplied per deployment.
_PLACEHOLDER = 'CHANGE_ME'

# Baseline options handed to the WebLab instance creation script.
DEFAULT_DEPLOYMENT_SETTINGS = {
    Creation.COORD_ENGINE: 'redis',
    Creation.COORD_REDIS_DB: 0,
    Creation.COORD_REDIS_PORT: 6379,
    Creation.DB_ENGINE: 'mysql',
    Creation.ADMIN_USER: _PLACEHOLDER,
    Creation.ADMIN_NAME: _PLACEHOLDER,
    Creation.ADMIN_PASSWORD: _PLACEHOLDER,
    Creation.ADMIN_MAIL: _PLACEHOLDER,
    Creation.START_PORTS: _PLACEHOLDER,
    Creation.SYSTEM_IDENTIFIER: _PLACEHOLDER,
    Creation.SERVER_HOST: 'weblab.deusto.es',
    Creation.ENTITY_LINK: 'http://www.deusto.es/',
    Creation.CORES: 3,
    Creation.ADD_FEDERATED_LOGIC: True,
    Creation.ADD_FEDERATED_VISIR: True,
    Creation.ADD_FEDERATED_SUBMARINE: True,
}
Establish different settings for minimal portfrom weblab.admin.script import Creation
# Filename of the generated Apache configuration.
APACHE_CONF_NAME = 'apache.conf'

# Lowest port number assigned to deployed WebLab instances.
MIN_PORT = 14000

# Marker for values that must be supplied per deployment.
_PLACEHOLDER = 'CHANGE_ME'

# Baseline options handed to the WebLab instance creation script.
DEFAULT_DEPLOYMENT_SETTINGS = {
    Creation.COORD_ENGINE: 'redis',
    Creation.COORD_REDIS_DB: 0,
    Creation.COORD_REDIS_PORT: 6379,
    Creation.DB_ENGINE: 'mysql',
    Creation.ADMIN_USER: _PLACEHOLDER,
    Creation.ADMIN_NAME: _PLACEHOLDER,
    Creation.ADMIN_PASSWORD: _PLACEHOLDER,
    Creation.ADMIN_MAIL: _PLACEHOLDER,
    Creation.START_PORTS: _PLACEHOLDER,
    Creation.SYSTEM_IDENTIFIER: _PLACEHOLDER,
    Creation.SERVER_HOST: 'weblab.deusto.es',
    Creation.ENTITY_LINK: 'http://www.deusto.es/',
    Creation.CORES: 3,
    Creation.ADD_FEDERATED_LOGIC: True,
    Creation.ADD_FEDERATED_VISIR: True,
    Creation.ADD_FEDERATED_SUBMARINE: True,
}
|
<commit_before>from weblab.admin.script import Creation
# Filename of the generated Apache configuration.
APACHE_CONF_NAME = 'apache.conf'

# Lowest port number assigned to deployed WebLab instances.
MIN_PORT = 10000

# Marker for values that must be supplied per deployment.
_PLACEHOLDER = 'CHANGE_ME'

# Baseline options handed to the WebLab instance creation script.
DEFAULT_DEPLOYMENT_SETTINGS = {
    Creation.COORD_ENGINE: 'redis',
    Creation.COORD_REDIS_DB: 0,
    Creation.COORD_REDIS_PORT: 6379,
    Creation.DB_ENGINE: 'mysql',
    Creation.ADMIN_USER: _PLACEHOLDER,
    Creation.ADMIN_NAME: _PLACEHOLDER,
    Creation.ADMIN_PASSWORD: _PLACEHOLDER,
    Creation.ADMIN_MAIL: _PLACEHOLDER,
    Creation.START_PORTS: _PLACEHOLDER,
    Creation.SYSTEM_IDENTIFIER: _PLACEHOLDER,
    Creation.SERVER_HOST: 'weblab.deusto.es',
    Creation.ENTITY_LINK: 'http://www.deusto.es/',
    Creation.CORES: 3,
    Creation.ADD_FEDERATED_LOGIC: True,
    Creation.ADD_FEDERATED_VISIR: True,
    Creation.ADD_FEDERATED_SUBMARINE: True,
}
<commit_msg>Establish different settings for minimal port<commit_after>from weblab.admin.script import Creation
# Filename of the generated Apache configuration.
APACHE_CONF_NAME = 'apache.conf'

# Lowest port number assigned to deployed WebLab instances.
MIN_PORT = 14000

# Marker for values that must be supplied per deployment.
_PLACEHOLDER = 'CHANGE_ME'

# Baseline options handed to the WebLab instance creation script.
DEFAULT_DEPLOYMENT_SETTINGS = {
    Creation.COORD_ENGINE: 'redis',
    Creation.COORD_REDIS_DB: 0,
    Creation.COORD_REDIS_PORT: 6379,
    Creation.DB_ENGINE: 'mysql',
    Creation.ADMIN_USER: _PLACEHOLDER,
    Creation.ADMIN_NAME: _PLACEHOLDER,
    Creation.ADMIN_PASSWORD: _PLACEHOLDER,
    Creation.ADMIN_MAIL: _PLACEHOLDER,
    Creation.START_PORTS: _PLACEHOLDER,
    Creation.SYSTEM_IDENTIFIER: _PLACEHOLDER,
    Creation.SERVER_HOST: 'weblab.deusto.es',
    Creation.ENTITY_LINK: 'http://www.deusto.es/',
    Creation.CORES: 3,
    Creation.ADD_FEDERATED_LOGIC: True,
    Creation.ADD_FEDERATED_VISIR: True,
    Creation.ADD_FEDERATED_SUBMARINE: True,
}
|
89b5f2a7afaa5d8533500ab35dd418e996bd305d
|
setup.py
|
setup.py
|
from setuptools import setup

# Console entry points exposed by the package.
cli_tools = ["spritemapper = spritecss.main:main"]

setup(
    name="spritemapper",
    version="0.5",
    url="http://yostudios.github.com/Spritemapper/",
    author="Yo Studios AB",
    author_email="opensource@yostudios.se",
    description=("A suite for merging multiple images and generate "
                 "corresponding CSS in one go"),
    license="MIT/X11",
    packages=["spritecss", "spritecss.css", "spritecss.packing"],
    test_suite="nose.collector",
    tests_require=["nose"],
    entry_points={"console_scripts": cli_tools},
)
|
from setuptools import setup

# Long description for PyPI. The "U" (universal-newline) open mode was
# removed in Python 3.11, so open in default text mode and close promptly.
with open("README.rst") as readme:
    readme_text = readme.read()

# Console entry points exposed by the package.
cli_tools = ["spritemapper = spritecss.main:main"]

setup(
    name="spritemapper",
    version="0.5",
    url="http://yostudios.github.com/Spritemapper/",
    author="Yo Studios AB",
    author_email="opensource@yostudios.se",
    description="A suite for merging multiple images "
                "and generate corresponding CSS in one go",
    long_description=readme_text,
    license="MIT/X11",
    packages=["spritecss", "spritecss.css", "spritecss.packing"],
    test_suite="nose.collector",
    tests_require=["nose"],
    entry_points={"console_scripts": cli_tools},
)
|
Add long description and fix 80w
|
Add long description and fix 80w
|
Python
|
mit
|
wpj-cz/Spritemapper,yostudios/Spritemapper,wpj-cz/Spritemapper,wpj-cz/Spritemapper,yostudios/Spritemapper,yostudios/Spritemapper,wpj-cz/Spritemapper
|
from setuptools import setup

# Console entry points exposed by the package.
cli_tools = ["spritemapper = spritecss.main:main"]

setup(
    name="spritemapper",
    version="0.5",
    url="http://yostudios.github.com/Spritemapper/",
    author="Yo Studios AB",
    author_email="opensource@yostudios.se",
    description=("A suite for merging multiple images and generate "
                 "corresponding CSS in one go"),
    license="MIT/X11",
    packages=["spritecss", "spritecss.css", "spritecss.packing"],
    test_suite="nose.collector",
    tests_require=["nose"],
    entry_points={"console_scripts": cli_tools},
)
Add long description and fix 80w
|
from setuptools import setup

# Long description for PyPI. The "U" (universal-newline) open mode was
# removed in Python 3.11, so open in default text mode and close promptly.
with open("README.rst") as readme:
    readme_text = readme.read()

# Console entry points exposed by the package.
cli_tools = ["spritemapper = spritecss.main:main"]

setup(
    name="spritemapper",
    version="0.5",
    url="http://yostudios.github.com/Spritemapper/",
    author="Yo Studios AB",
    author_email="opensource@yostudios.se",
    description="A suite for merging multiple images "
                "and generate corresponding CSS in one go",
    long_description=readme_text,
    license="MIT/X11",
    packages=["spritecss", "spritecss.css", "spritecss.packing"],
    test_suite="nose.collector",
    tests_require=["nose"],
    entry_points={"console_scripts": cli_tools},
)
|
<commit_before>from setuptools import setup
# Console entry points exposed by the package.
cli_tools = ["spritemapper = spritecss.main:main"]

setup(
    name="spritemapper",
    version="0.5",
    url="http://yostudios.github.com/Spritemapper/",
    author="Yo Studios AB",
    author_email="opensource@yostudios.se",
    description=("A suite for merging multiple images and generate "
                 "corresponding CSS in one go"),
    license="MIT/X11",
    packages=["spritecss", "spritecss.css", "spritecss.packing"],
    test_suite="nose.collector",
    tests_require=["nose"],
    entry_points={"console_scripts": cli_tools},
)
<commit_msg>Add long description and fix 80w<commit_after>
|
from setuptools import setup

# Long description for PyPI. The "U" (universal-newline) open mode was
# removed in Python 3.11, so open in default text mode and close promptly.
with open("README.rst") as readme:
    readme_text = readme.read()

# Console entry points exposed by the package.
cli_tools = ["spritemapper = spritecss.main:main"]

setup(
    name="spritemapper",
    version="0.5",
    url="http://yostudios.github.com/Spritemapper/",
    author="Yo Studios AB",
    author_email="opensource@yostudios.se",
    description="A suite for merging multiple images "
                "and generate corresponding CSS in one go",
    long_description=readme_text,
    license="MIT/X11",
    packages=["spritecss", "spritecss.css", "spritecss.packing"],
    test_suite="nose.collector",
    tests_require=["nose"],
    entry_points={"console_scripts": cli_tools},
)
|
from setuptools import setup

# Console entry points exposed by the package.
cli_tools = ["spritemapper = spritecss.main:main"]

setup(
    name="spritemapper",
    version="0.5",
    url="http://yostudios.github.com/Spritemapper/",
    author="Yo Studios AB",
    author_email="opensource@yostudios.se",
    description=("A suite for merging multiple images and generate "
                 "corresponding CSS in one go"),
    license="MIT/X11",
    packages=["spritecss", "spritecss.css", "spritecss.packing"],
    test_suite="nose.collector",
    tests_require=["nose"],
    entry_points={"console_scripts": cli_tools},
)
Add long description and fix 80wfrom setuptools import setup
# Long description for PyPI. The "U" (universal-newline) open mode was
# removed in Python 3.11, so open in default text mode and close promptly.
with open("README.rst") as readme:
    readme_text = readme.read()

# Console entry points exposed by the package.
cli_tools = ["spritemapper = spritecss.main:main"]

setup(
    name="spritemapper",
    version="0.5",
    url="http://yostudios.github.com/Spritemapper/",
    author="Yo Studios AB",
    author_email="opensource@yostudios.se",
    description="A suite for merging multiple images "
                "and generate corresponding CSS in one go",
    long_description=readme_text,
    license="MIT/X11",
    packages=["spritecss", "spritecss.css", "spritecss.packing"],
    test_suite="nose.collector",
    tests_require=["nose"],
    entry_points={"console_scripts": cli_tools},
)
|
<commit_before>from setuptools import setup
# Console entry points exposed by the package.
cli_tools = ["spritemapper = spritecss.main:main"]

setup(
    name="spritemapper",
    version="0.5",
    url="http://yostudios.github.com/Spritemapper/",
    author="Yo Studios AB",
    author_email="opensource@yostudios.se",
    description=("A suite for merging multiple images and generate "
                 "corresponding CSS in one go"),
    license="MIT/X11",
    packages=["spritecss", "spritecss.css", "spritecss.packing"],
    test_suite="nose.collector",
    tests_require=["nose"],
    entry_points={"console_scripts": cli_tools},
)
<commit_msg>Add long description and fix 80w<commit_after>from setuptools import setup
# Long description for PyPI. The "U" (universal-newline) open mode was
# removed in Python 3.11, so open in default text mode and close promptly.
with open("README.rst") as readme:
    readme_text = readme.read()

# Console entry points exposed by the package.
cli_tools = ["spritemapper = spritecss.main:main"]

setup(
    name="spritemapper",
    version="0.5",
    url="http://yostudios.github.com/Spritemapper/",
    author="Yo Studios AB",
    author_email="opensource@yostudios.se",
    description="A suite for merging multiple images "
                "and generate corresponding CSS in one go",
    long_description=readme_text,
    license="MIT/X11",
    packages=["spritecss", "spritecss.css", "spritecss.packing"],
    test_suite="nose.collector",
    tests_require=["nose"],
    entry_points={"console_scripts": cli_tools},
)
|
5a8963c7be6bb328346d21f2b4afdc7256c7cd96
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages


def _read(path):
    """Return the text of *path*, closing the file handle promptly.

    The previous bare ``open(path).read()`` calls leaked file handles
    (ResourceWarning under CPython, real leaks elsewhere).
    """
    with open(path) as handle:
        return handle.read()


setup(
    name="crust",
    version="0.1",
    description="Framework for Tastypie API Clients",
    long_description=_read("README.rst"),
    url="https://github.com/dstufft/crust/",
    license=_read("LICENSE"),
    author="Donald Stufft",
    author_email="donald.stufft@gmail.com",
    install_requires=[
        "requests",
    ],
    extras_require={
        "test": ["pytest"],
    },
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["LICENSE"]},
    include_package_data=True,
    zip_safe=False,
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages


def _read(path):
    """Return the text of *path*, closing the file handle promptly.

    The previous bare ``open(path).read()`` calls leaked file handles
    (ResourceWarning under CPython, real leaks elsewhere).
    """
    with open(path) as handle:
        return handle.read()


setup(
    name="crust",
    version="0.1.dev1",
    description="Framework for Tastypie API Clients",
    long_description=_read("README.rst"),
    url="https://github.com/dstufft/crust/",
    license=_read("LICENSE"),
    author="Donald Stufft",
    author_email="donald.stufft@gmail.com",
    install_requires=[
        "requests",
    ],
    extras_require={
        "test": ["pytest"],
    },
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["LICENSE"]},
    include_package_data=True,
    zip_safe=False,
)
|
Make version a dev version
|
Make version a dev version
|
Python
|
bsd-2-clause
|
dstufft/crust
|
#!/usr/bin/env python
from setuptools import setup, find_packages


def _read(path):
    """Return the text of *path*, closing the file handle promptly.

    The previous bare ``open(path).read()`` calls leaked file handles
    (ResourceWarning under CPython, real leaks elsewhere).
    """
    with open(path) as handle:
        return handle.read()


setup(
    name="crust",
    version="0.1",
    description="Framework for Tastypie API Clients",
    long_description=_read("README.rst"),
    url="https://github.com/dstufft/crust/",
    license=_read("LICENSE"),
    author="Donald Stufft",
    author_email="donald.stufft@gmail.com",
    install_requires=[
        "requests",
    ],
    extras_require={
        "test": ["pytest"],
    },
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["LICENSE"]},
    include_package_data=True,
    zip_safe=False,
)
Make version a dev version
|
#!/usr/bin/env python
from setuptools import setup, find_packages


def _read(path):
    """Return the text of *path*, closing the file handle promptly.

    The previous bare ``open(path).read()`` calls leaked file handles
    (ResourceWarning under CPython, real leaks elsewhere).
    """
    with open(path) as handle:
        return handle.read()


setup(
    name="crust",
    version="0.1.dev1",
    description="Framework for Tastypie API Clients",
    long_description=_read("README.rst"),
    url="https://github.com/dstufft/crust/",
    license=_read("LICENSE"),
    author="Donald Stufft",
    author_email="donald.stufft@gmail.com",
    install_requires=[
        "requests",
    ],
    extras_require={
        "test": ["pytest"],
    },
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["LICENSE"]},
    include_package_data=True,
    zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages


def _read(path):
    """Return the text of *path*, closing the file handle promptly.

    The previous bare ``open(path).read()`` calls leaked file handles
    (ResourceWarning under CPython, real leaks elsewhere).
    """
    with open(path) as handle:
        return handle.read()


setup(
    name="crust",
    version="0.1",
    description="Framework for Tastypie API Clients",
    long_description=_read("README.rst"),
    url="https://github.com/dstufft/crust/",
    license=_read("LICENSE"),
    author="Donald Stufft",
    author_email="donald.stufft@gmail.com",
    install_requires=[
        "requests",
    ],
    extras_require={
        "test": ["pytest"],
    },
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["LICENSE"]},
    include_package_data=True,
    zip_safe=False,
)
<commit_msg>Make version a dev version<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages


def _read(path):
    """Return the text of *path*, closing the file handle promptly.

    The previous bare ``open(path).read()`` calls leaked file handles
    (ResourceWarning under CPython, real leaks elsewhere).
    """
    with open(path) as handle:
        return handle.read()


setup(
    name="crust",
    version="0.1.dev1",
    description="Framework for Tastypie API Clients",
    long_description=_read("README.rst"),
    url="https://github.com/dstufft/crust/",
    license=_read("LICENSE"),
    author="Donald Stufft",
    author_email="donald.stufft@gmail.com",
    install_requires=[
        "requests",
    ],
    extras_require={
        "test": ["pytest"],
    },
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["LICENSE"]},
    include_package_data=True,
    zip_safe=False,
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages


def _read(path):
    """Return the text of *path*, closing the file handle promptly.

    The previous bare ``open(path).read()`` calls leaked file handles
    (ResourceWarning under CPython, real leaks elsewhere).
    """
    with open(path) as handle:
        return handle.read()


setup(
    name="crust",
    version="0.1",
    description="Framework for Tastypie API Clients",
    long_description=_read("README.rst"),
    url="https://github.com/dstufft/crust/",
    license=_read("LICENSE"),
    author="Donald Stufft",
    author_email="donald.stufft@gmail.com",
    install_requires=[
        "requests",
    ],
    extras_require={
        "test": ["pytest"],
    },
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["LICENSE"]},
    include_package_data=True,
    zip_safe=False,
)
Make version a dev version#!/usr/bin/env python
from setuptools import setup, find_packages


def _read(path):
    """Return the text of *path*, closing the file handle promptly.

    The previous bare ``open(path).read()`` calls leaked file handles
    (ResourceWarning under CPython, real leaks elsewhere).
    """
    with open(path) as handle:
        return handle.read()


setup(
    name="crust",
    version="0.1.dev1",
    description="Framework for Tastypie API Clients",
    long_description=_read("README.rst"),
    url="https://github.com/dstufft/crust/",
    license=_read("LICENSE"),
    author="Donald Stufft",
    author_email="donald.stufft@gmail.com",
    install_requires=[
        "requests",
    ],
    extras_require={
        "test": ["pytest"],
    },
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["LICENSE"]},
    include_package_data=True,
    zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages


def _read(path):
    """Return the text of *path*, closing the file handle promptly.

    The previous bare ``open(path).read()`` calls leaked file handles
    (ResourceWarning under CPython, real leaks elsewhere).
    """
    with open(path) as handle:
        return handle.read()


setup(
    name="crust",
    version="0.1",
    description="Framework for Tastypie API Clients",
    long_description=_read("README.rst"),
    url="https://github.com/dstufft/crust/",
    license=_read("LICENSE"),
    author="Donald Stufft",
    author_email="donald.stufft@gmail.com",
    install_requires=[
        "requests",
    ],
    extras_require={
        "test": ["pytest"],
    },
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["LICENSE"]},
    include_package_data=True,
    zip_safe=False,
)
<commit_msg>Make version a dev version<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages


def _read(path):
    """Return the text of *path*, closing the file handle promptly.

    The previous bare ``open(path).read()`` calls leaked file handles
    (ResourceWarning under CPython, real leaks elsewhere).
    """
    with open(path) as handle:
        return handle.read()


setup(
    name="crust",
    version="0.1.dev1",
    description="Framework for Tastypie API Clients",
    long_description=_read("README.rst"),
    url="https://github.com/dstufft/crust/",
    license=_read("LICENSE"),
    author="Donald Stufft",
    author_email="donald.stufft@gmail.com",
    install_requires=[
        "requests",
    ],
    extras_require={
        "test": ["pytest"],
    },
    packages=find_packages(exclude=["tests"]),
    package_data={"": ["LICENSE"]},
    include_package_data=True,
    zip_safe=False,
)
|
36c2e7449b7817a66b60eaff4c8518ae6d4f4a01
|
categories/tests.py
|
categories/tests.py
|
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase


class CategoryTestCase(APITestCase):
    """API tests for Category defaults and the category list endpoint."""

    def setUp(self):
        Category.objects.create(name='Coworker')
        Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
        Category.objects.create(name='Category1', weight=2)
        Category.objects.create(name='Category2')
        self.client.login(username='user1', password='user1password')

    def test_category_creation(self):
        explicit = Category.objects.get(name='Category1')
        implicit = Category.objects.get(name='Category2')
        # An explicit weight is stored; the model default weight is 1.
        self.assertEqual(explicit.weight, 2)
        self.assertEqual(implicit.weight, 1)

    def test_category_list(self):
        expected = CategorySerializer(Category.objects.all(), many=True).data
        response = self.client.get(reverse('categories:category_list'), format='json')
        self.assertEqual(response.data, expected)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
|
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
|
Remove categoy_list test until urls will fixed.
|
Remove categoy_list test until urls will fixed.
|
Python
|
apache-2.0
|
belatrix/BackendAllStars
|
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
def test_category_list(self):
categories = Category.objects.all()
response_data = CategorySerializer(categories, many=True).data
url = reverse('categories:category_list')
response = self.client.get(url, format='json')
self.assertEqual(response.data, response_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
Remove categoy_list test until urls will fixed.
|
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
|
<commit_before>from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
def test_category_list(self):
categories = Category.objects.all()
response_data = CategorySerializer(categories, many=True).data
url = reverse('categories:category_list')
response = self.client.get(url, format='json')
self.assertEqual(response.data, response_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
<commit_msg>Remove categoy_list test until urls will fixed.<commit_after>
|
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
|
from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
def test_category_list(self):
categories = Category.objects.all()
response_data = CategorySerializer(categories, many=True).data
url = reverse('categories:category_list')
response = self.client.get(url, format='json')
self.assertEqual(response.data, response_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
Remove categoy_list test until urls will fixed.from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
|
<commit_before>from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
def test_category_list(self):
categories = Category.objects.all()
response_data = CategorySerializer(categories, many=True).data
url = reverse('categories:category_list')
response = self.client.get(url, format='json')
self.assertEqual(response.data, response_data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
<commit_msg>Remove categoy_list test until urls will fixed.<commit_after>from .models import Category
from .serializers import CategorySerializer
from employees.models import Employee
from django.core.urlresolvers import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class CategoryTestCase(APITestCase):
def setUp(self):
Category.objects.create(name='Coworker')
Employee.objects.create_superuser('user1', 'user1@email.com', 'user1password')
Category.objects.create(name='Category1', weight=2)
Category.objects.create(name='Category2')
self.client.login(username='user1', password='user1password')
def test_category_creation(self):
category1 = Category.objects.get(name='Category1')
category2 = Category.objects.get(name='Category2')
self.assertEqual(category1.weight, 2)
self.assertEqual(category2.weight, 1)
|
1f1b6757f82ef2ad384c2360f22935abbbc09531
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Certificator',
version='0.1.0',
long_description=__doc__,
packages=['certificator'],
include_package_data=True,
zip_safe=False,
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.2.1',
'coverage==3.6',
'Flask-Testing==0.4',
],
install_requires=[
'Flask==0.10.1',
'Flask-Login==0.2.5',
'Flask-SQLAlchemy==0.16',
'Flask-Script==0.5.3',
'Flask-BrowserId==0.0.1',
'Jinja2==2.6',
'MarkupSafe==0.18',
'SQLAlchemy==0.8.1',
'Werkzeug==0.8.3',
'argparse==1.2.1',
'itsdangerous==0.22',
'requests==1.2.3',
'twill==0.9',
'wsgiref==0.1.2',
],
entry_points={
'console_scripts': [
'certificator-server = certificator.main:main',
],
}
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Certificator',
version='0.1.0',
long_description=__doc__,
packages=['certificator'],
include_package_data=True,
zip_safe=False,
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.2.1',
'coverage==3.6',
'Flask-Testing==0.4',
],
install_requires=[
'Flask==0.10.1',
'Flask-Login==0.2.5',
'Flask-SQLAlchemy==0.16',
'Flask-Script==0.5.3',
'Flask-BrowserId==0.0.2',
'Jinja2==2.7',
'MarkupSafe==0.18',
'SQLAlchemy==0.8.1',
'Werkzeug==0.8.3',
'argparse==1.2.1',
'itsdangerous==0.22',
'requests==1.2.3',
'twill==0.9',
'wsgiref==0.1.2',
],
dependency_links=[
'http://github.com/cnelsonsic/flask-browserid/tarball/fix_setup_py#egg=Flask-BrowserId-0.0.2',
],
entry_points={
'console_scripts': [
'certificator-server = certificator.main:main',
],
}
)
|
Use my own fork of flask-browserid until it gets merged
|
Use my own fork of flask-browserid until it gets merged
|
Python
|
agpl-3.0
|
cnelsonsic/Certificator
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Certificator',
version='0.1.0',
long_description=__doc__,
packages=['certificator'],
include_package_data=True,
zip_safe=False,
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.2.1',
'coverage==3.6',
'Flask-Testing==0.4',
],
install_requires=[
'Flask==0.10.1',
'Flask-Login==0.2.5',
'Flask-SQLAlchemy==0.16',
'Flask-Script==0.5.3',
'Flask-BrowserId==0.0.1',
'Jinja2==2.6',
'MarkupSafe==0.18',
'SQLAlchemy==0.8.1',
'Werkzeug==0.8.3',
'argparse==1.2.1',
'itsdangerous==0.22',
'requests==1.2.3',
'twill==0.9',
'wsgiref==0.1.2',
],
entry_points={
'console_scripts': [
'certificator-server = certificator.main:main',
],
}
)
Use my own fork of flask-browserid until it gets merged
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Certificator',
version='0.1.0',
long_description=__doc__,
packages=['certificator'],
include_package_data=True,
zip_safe=False,
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.2.1',
'coverage==3.6',
'Flask-Testing==0.4',
],
install_requires=[
'Flask==0.10.1',
'Flask-Login==0.2.5',
'Flask-SQLAlchemy==0.16',
'Flask-Script==0.5.3',
'Flask-BrowserId==0.0.2',
'Jinja2==2.7',
'MarkupSafe==0.18',
'SQLAlchemy==0.8.1',
'Werkzeug==0.8.3',
'argparse==1.2.1',
'itsdangerous==0.22',
'requests==1.2.3',
'twill==0.9',
'wsgiref==0.1.2',
],
dependency_links=[
'http://github.com/cnelsonsic/flask-browserid/tarball/fix_setup_py#egg=Flask-BrowserId-0.0.2',
],
entry_points={
'console_scripts': [
'certificator-server = certificator.main:main',
],
}
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='Certificator',
version='0.1.0',
long_description=__doc__,
packages=['certificator'],
include_package_data=True,
zip_safe=False,
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.2.1',
'coverage==3.6',
'Flask-Testing==0.4',
],
install_requires=[
'Flask==0.10.1',
'Flask-Login==0.2.5',
'Flask-SQLAlchemy==0.16',
'Flask-Script==0.5.3',
'Flask-BrowserId==0.0.1',
'Jinja2==2.6',
'MarkupSafe==0.18',
'SQLAlchemy==0.8.1',
'Werkzeug==0.8.3',
'argparse==1.2.1',
'itsdangerous==0.22',
'requests==1.2.3',
'twill==0.9',
'wsgiref==0.1.2',
],
entry_points={
'console_scripts': [
'certificator-server = certificator.main:main',
],
}
)
<commit_msg>Use my own fork of flask-browserid until it gets merged<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Certificator',
version='0.1.0',
long_description=__doc__,
packages=['certificator'],
include_package_data=True,
zip_safe=False,
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.2.1',
'coverage==3.6',
'Flask-Testing==0.4',
],
install_requires=[
'Flask==0.10.1',
'Flask-Login==0.2.5',
'Flask-SQLAlchemy==0.16',
'Flask-Script==0.5.3',
'Flask-BrowserId==0.0.2',
'Jinja2==2.7',
'MarkupSafe==0.18',
'SQLAlchemy==0.8.1',
'Werkzeug==0.8.3',
'argparse==1.2.1',
'itsdangerous==0.22',
'requests==1.2.3',
'twill==0.9',
'wsgiref==0.1.2',
],
dependency_links=[
'http://github.com/cnelsonsic/flask-browserid/tarball/fix_setup_py#egg=Flask-BrowserId-0.0.2',
],
entry_points={
'console_scripts': [
'certificator-server = certificator.main:main',
],
}
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Certificator',
version='0.1.0',
long_description=__doc__,
packages=['certificator'],
include_package_data=True,
zip_safe=False,
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.2.1',
'coverage==3.6',
'Flask-Testing==0.4',
],
install_requires=[
'Flask==0.10.1',
'Flask-Login==0.2.5',
'Flask-SQLAlchemy==0.16',
'Flask-Script==0.5.3',
'Flask-BrowserId==0.0.1',
'Jinja2==2.6',
'MarkupSafe==0.18',
'SQLAlchemy==0.8.1',
'Werkzeug==0.8.3',
'argparse==1.2.1',
'itsdangerous==0.22',
'requests==1.2.3',
'twill==0.9',
'wsgiref==0.1.2',
],
entry_points={
'console_scripts': [
'certificator-server = certificator.main:main',
],
}
)
Use my own fork of flask-browserid until it gets merged#!/usr/bin/env python
from setuptools import setup
setup(
name='Certificator',
version='0.1.0',
long_description=__doc__,
packages=['certificator'],
include_package_data=True,
zip_safe=False,
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.2.1',
'coverage==3.6',
'Flask-Testing==0.4',
],
install_requires=[
'Flask==0.10.1',
'Flask-Login==0.2.5',
'Flask-SQLAlchemy==0.16',
'Flask-Script==0.5.3',
'Flask-BrowserId==0.0.2',
'Jinja2==2.7',
'MarkupSafe==0.18',
'SQLAlchemy==0.8.1',
'Werkzeug==0.8.3',
'argparse==1.2.1',
'itsdangerous==0.22',
'requests==1.2.3',
'twill==0.9',
'wsgiref==0.1.2',
],
dependency_links=[
'http://github.com/cnelsonsic/flask-browserid/tarball/fix_setup_py#egg=Flask-BrowserId-0.0.2',
],
entry_points={
'console_scripts': [
'certificator-server = certificator.main:main',
],
}
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='Certificator',
version='0.1.0',
long_description=__doc__,
packages=['certificator'],
include_package_data=True,
zip_safe=False,
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.2.1',
'coverage==3.6',
'Flask-Testing==0.4',
],
install_requires=[
'Flask==0.10.1',
'Flask-Login==0.2.5',
'Flask-SQLAlchemy==0.16',
'Flask-Script==0.5.3',
'Flask-BrowserId==0.0.1',
'Jinja2==2.6',
'MarkupSafe==0.18',
'SQLAlchemy==0.8.1',
'Werkzeug==0.8.3',
'argparse==1.2.1',
'itsdangerous==0.22',
'requests==1.2.3',
'twill==0.9',
'wsgiref==0.1.2',
],
entry_points={
'console_scripts': [
'certificator-server = certificator.main:main',
],
}
)
<commit_msg>Use my own fork of flask-browserid until it gets merged<commit_after>#!/usr/bin/env python
from setuptools import setup
setup(
name='Certificator',
version='0.1.0',
long_description=__doc__,
packages=['certificator'],
include_package_data=True,
zip_safe=False,
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.2.1',
'coverage==3.6',
'Flask-Testing==0.4',
],
install_requires=[
'Flask==0.10.1',
'Flask-Login==0.2.5',
'Flask-SQLAlchemy==0.16',
'Flask-Script==0.5.3',
'Flask-BrowserId==0.0.2',
'Jinja2==2.7',
'MarkupSafe==0.18',
'SQLAlchemy==0.8.1',
'Werkzeug==0.8.3',
'argparse==1.2.1',
'itsdangerous==0.22',
'requests==1.2.3',
'twill==0.9',
'wsgiref==0.1.2',
],
dependency_links=[
'http://github.com/cnelsonsic/flask-browserid/tarball/fix_setup_py#egg=Flask-BrowserId-0.0.2',
],
entry_points={
'console_scripts': [
'certificator-server = certificator.main:main',
],
}
)
|
fdf82d36b86dff35943bb00f73b915240f5bd68c
|
setup.py
|
setup.py
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
|
Change from beta -> stable for v1.0.x
|
Change from beta -> stable for v1.0.x
|
Python
|
bsd-3-clause
|
bennuttall/energenie,RPi-Distro/python-energenie,rjw57/energenie
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
Change from beta -> stable for v1.0.x
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
|
<commit_before>import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
<commit_msg>Change from beta -> stable for v1.0.x<commit_after>
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
Change from beta -> stable for v1.0.ximport os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
|
<commit_before>import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
<commit_msg>Change from beta -> stable for v1.0.x<commit_after>import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="energenie",
version="1.0.1",
author="Ben Nuttall",
author_email="ben@raspberrypi.org",
description="Remotely control power sockets from the Raspberry Pi",
license="BSD",
keywords=[
"energenie",
"raspberrypi",
],
url="https://github.com/bennuttall/energenie",
packages=[
"energenie",
],
install_requires=[
"RPi.GPIO",
],
long_description=read('README.rst'),
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
|
cf671f70974348ac202f613c6b090f05d4f2543b
|
setup.py
|
setup.py
|
try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
DESC = ('This Python library for Raspberry Pi makes it easy to leverage the '
'complex functionality of the Texas Instruments INA219 '
'sensor to measure voltage, current and power.')
classifiers = ['Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Hardware :: Hardware Drivers']
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
def read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
return ""
setup(name='pi-ina219',
version='1.1.0',
author='Chris Borrill',
author_email='chris.borrill@gmail.com',
description=DESC,
long_description=read_long_description(),
license='MIT',
url='https://github.com/chrisb2/pi_ina219/',
classifiers=classifiers,
keywords='ina219 raspberrypi',
install_requires=requires,
test_suite='tests',
py_modules=['ina219'])
|
try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
DESC = ('This Python library for Raspberry Pi makes it easy to leverage the '
'complex functionality of the Texas Instruments INA219 '
'sensor to measure voltage, current and power.')
classifiers = ['Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Hardware :: Hardware Drivers']
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
def read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
return ""
setup(name='pi-ina219',
version='1.2.0',
author='Chris Borrill',
author_email='chris.borrill@gmail.com',
description=DESC,
long_description=read_long_description(),
license='MIT',
url='https://github.com/chrisb2/pi_ina219/',
classifiers=classifiers,
keywords='ina219 raspberrypi',
install_requires=requires,
test_suite='tests',
py_modules=['ina219'])
|
Increment version in preperation for release of version 1.2.0
|
Increment version in preperation for release of version 1.2.0
|
Python
|
mit
|
chrisb2/pi_ina219
|
try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
DESC = ('This Python library for Raspberry Pi makes it easy to leverage the '
'complex functionality of the Texas Instruments INA219 '
'sensor to measure voltage, current and power.')
classifiers = ['Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Hardware :: Hardware Drivers']
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
def read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
return ""
setup(name='pi-ina219',
version='1.1.0',
author='Chris Borrill',
author_email='chris.borrill@gmail.com',
description=DESC,
long_description=read_long_description(),
license='MIT',
url='https://github.com/chrisb2/pi_ina219/',
classifiers=classifiers,
keywords='ina219 raspberrypi',
install_requires=requires,
test_suite='tests',
py_modules=['ina219'])
Increment version in preperation for release of version 1.2.0
|
try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
DESC = ('This Python library for Raspberry Pi makes it easy to leverage the '
'complex functionality of the Texas Instruments INA219 '
'sensor to measure voltage, current and power.')
classifiers = ['Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Hardware :: Hardware Drivers']
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
def read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
return ""
setup(name='pi-ina219',
version='1.2.0',
author='Chris Borrill',
author_email='chris.borrill@gmail.com',
description=DESC,
long_description=read_long_description(),
license='MIT',
url='https://github.com/chrisb2/pi_ina219/',
classifiers=classifiers,
keywords='ina219 raspberrypi',
install_requires=requires,
test_suite='tests',
py_modules=['ina219'])
|
<commit_before>try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
DESC = ('This Python library for Raspberry Pi makes it easy to leverage the '
'complex functionality of the Texas Instruments INA219 '
'sensor to measure voltage, current and power.')
classifiers = ['Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Hardware :: Hardware Drivers']
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
def read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
return ""
setup(name='pi-ina219',
version='1.1.0',
author='Chris Borrill',
author_email='chris.borrill@gmail.com',
description=DESC,
long_description=read_long_description(),
license='MIT',
url='https://github.com/chrisb2/pi_ina219/',
classifiers=classifiers,
keywords='ina219 raspberrypi',
install_requires=requires,
test_suite='tests',
py_modules=['ina219'])
<commit_msg>Increment version in preperation for release of version 1.2.0<commit_after>
|
try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
DESC = ('This Python library for Raspberry Pi makes it easy to leverage the '
'complex functionality of the Texas Instruments INA219 '
'sensor to measure voltage, current and power.')
classifiers = ['Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Hardware :: Hardware Drivers']
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
def read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
return ""
setup(name='pi-ina219',
version='1.2.0',
author='Chris Borrill',
author_email='chris.borrill@gmail.com',
description=DESC,
long_description=read_long_description(),
license='MIT',
url='https://github.com/chrisb2/pi_ina219/',
classifiers=classifiers,
keywords='ina219 raspberrypi',
install_requires=requires,
test_suite='tests',
py_modules=['ina219'])
|
try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
DESC = ('This Python library for Raspberry Pi makes it easy to leverage the '
'complex functionality of the Texas Instruments INA219 '
'sensor to measure voltage, current and power.')
classifiers = ['Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Hardware :: Hardware Drivers']
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
def read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
return ""
setup(name='pi-ina219',
version='1.1.0',
author='Chris Borrill',
author_email='chris.borrill@gmail.com',
description=DESC,
long_description=read_long_description(),
license='MIT',
url='https://github.com/chrisb2/pi_ina219/',
classifiers=classifiers,
keywords='ina219 raspberrypi',
install_requires=requires,
test_suite='tests',
py_modules=['ina219'])
Increment version in preperation for release of version 1.2.0try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
DESC = ('This Python library for Raspberry Pi makes it easy to leverage the '
'complex functionality of the Texas Instruments INA219 '
'sensor to measure voltage, current and power.')
classifiers = ['Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Hardware :: Hardware Drivers']
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
def read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
return ""
setup(name='pi-ina219',
version='1.2.0',
author='Chris Borrill',
author_email='chris.borrill@gmail.com',
description=DESC,
long_description=read_long_description(),
license='MIT',
url='https://github.com/chrisb2/pi_ina219/',
classifiers=classifiers,
keywords='ina219 raspberrypi',
install_requires=requires,
test_suite='tests',
py_modules=['ina219'])
|
<commit_before>try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
DESC = ('This Python library for Raspberry Pi makes it easy to leverage the '
'complex functionality of the Texas Instruments INA219 '
'sensor to measure voltage, current and power.')
classifiers = ['Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Hardware :: Hardware Drivers']
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
def read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
return ""
setup(name='pi-ina219',
version='1.1.0',
author='Chris Borrill',
author_email='chris.borrill@gmail.com',
description=DESC,
long_description=read_long_description(),
license='MIT',
url='https://github.com/chrisb2/pi_ina219/',
classifiers=classifiers,
keywords='ina219 raspberrypi',
install_requires=requires,
test_suite='tests',
py_modules=['ina219'])
<commit_msg>Increment version in preperation for release of version 1.2.0<commit_after>try:
# Try using ez_setup to install setuptools if not already installed.
from ez_setup import use_setuptools
use_setuptools()
except ImportError:
# Ignore import error and assume Python 3 which already has setuptools.
pass
from setuptools import setup
DESC = ('This Python library for Raspberry Pi makes it easy to leverage the '
'complex functionality of the Texas Instruments INA219 '
'sensor to measure voltage, current and power.')
classifiers = ['Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Topic :: System :: Hardware :: Hardware Drivers']
# Define required packages.
requires = ['Adafruit_GPIO', 'mock']
def read_long_description():
try:
import pypandoc
return pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
return ""
setup(name='pi-ina219',
version='1.2.0',
author='Chris Borrill',
author_email='chris.borrill@gmail.com',
description=DESC,
long_description=read_long_description(),
license='MIT',
url='https://github.com/chrisb2/pi_ina219/',
classifiers=classifiers,
keywords='ina219 raspberrypi',
install_requires=requires,
test_suite='tests',
py_modules=['ina219'])
|
40958981df401a898a39ddad45c2b48669a44ee7
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=[
"parsimonious>=0.5,<0.6",
]
)
|
#!/usr/bin/env python
import os
import sys
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
_install_requires = [
"parsimonious>=0.5,<0.6",
]
if sys.version_info[:2] <= (2, 6):
_install_requires.append("argparse==1.2.1")
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=_install_requires,
)
|
Support CLI on Python 2.6
|
Support CLI on Python 2.6
|
Python
|
bsd-2-clause
|
mwilliamson/python-mammoth,JoshBarr/python-mammoth
|
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=[
"parsimonious>=0.5,<0.6",
]
)
Support CLI on Python 2.6
|
#!/usr/bin/env python
import os
import sys
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
_install_requires = [
"parsimonious>=0.5,<0.6",
]
if sys.version_info[:2] <= (2, 6):
_install_requires.append("argparse==1.2.1")
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=_install_requires,
)
|
<commit_before>#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=[
"parsimonious>=0.5,<0.6",
]
)
<commit_msg>Support CLI on Python 2.6<commit_after>
|
#!/usr/bin/env python
import os
import sys
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
_install_requires = [
"parsimonious>=0.5,<0.6",
]
if sys.version_info[:2] <= (2, 6):
_install_requires.append("argparse==1.2.1")
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=_install_requires,
)
|
#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=[
"parsimonious>=0.5,<0.6",
]
)
Support CLI on Python 2.6#!/usr/bin/env python
import os
import sys
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
_install_requires = [
"parsimonious>=0.5,<0.6",
]
if sys.version_info[:2] <= (2, 6):
_install_requires.append("argparse==1.2.1")
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=_install_requires,
)
|
<commit_before>#!/usr/bin/env python
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=[
"parsimonious>=0.5,<0.6",
]
)
<commit_msg>Support CLI on Python 2.6<commit_after>#!/usr/bin/env python
import os
import sys
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
_install_requires = [
"parsimonious>=0.5,<0.6",
]
if sys.version_info[:2] <= (2, 6):
_install_requires.append("argparse==1.2.1")
setup(
name='mammoth',
version='0.1.1',
description='Convert Word documents to simple and clean HTML',
long_description=read("README"),
author='Michael Williamson',
author_email='mike@zwobble.org',
url='http://github.com/mwilliamson/python-mammoth',
packages=['mammoth', 'mammoth.docx', 'mammoth.style_reader'],
scripts=["scripts/mammoth"],
keywords="docx word office clean html",
install_requires=_install_requires,
)
|
139d09ecd83694dd92d393b64d1d9b0ad05e9f4c
|
setup.py
|
setup.py
|
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
|
import distutils.core
# Uploading to PyPI
# =================
# The first time only:
# $ python setup.py register -r pypi
#
# Every version bump:
# $ git tag <version>; git push
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
|
Add instructions for bumping the version.
|
Add instructions for bumping the version.
|
Python
|
mit
|
kalekundert/nonstdlib,KenKundert/nonstdlib,kalekundert/nonstdlib,KenKundert/nonstdlib
|
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
Add instructions for bumping the version.
|
import distutils.core
# Uploading to PyPI
# =================
# The first time only:
# $ python setup.py register -r pypi
#
# Every version bump:
# $ git tag <version>; git push
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
|
<commit_before>import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
<commit_msg>Add instructions for bumping the version.<commit_after>
|
import distutils.core
# Uploading to PyPI
# =================
# The first time only:
# $ python setup.py register -r pypi
#
# Every version bump:
# $ git tag <version>; git push
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
|
import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
Add instructions for bumping the version.import distutils.core
# Uploading to PyPI
# =================
# The first time only:
# $ python setup.py register -r pypi
#
# Every version bump:
# $ git tag <version>; git push
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
|
<commit_before>import distutils.core
# Uploading to PyPI
# =================
# $ python setup.py register -r pypi
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
<commit_msg>Add instructions for bumping the version.<commit_after>import distutils.core
# Uploading to PyPI
# =================
# The first time only:
# $ python setup.py register -r pypi
#
# Every version bump:
# $ git tag <version>; git push
# $ python setup.py sdist upload -r pypi
version = '1.0'
distutils.core.setup(
name='nonstdlib',
version=version,
author='Kale Kundert',
author='kale@thekunderts.net',
url='https://github.com/kalekundert/nonstdlib',
download_url='https://github.com/kalekundert/nonstdlib/tarball/'+version,
license='MIT',
description="A collection of general-purpose utilities.",
long_description=open('README.rst').read(),
keywords=['utilities', 'library'],
packages=['nonstdlib'],
)
|
783ee7a17fb3bc930618dc5788ec727a3d8f1a1b
|
setup.py
|
setup.py
|
import re
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
with open('abakus/__init__.py', 'r') as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(),
re.MULTILINE
).group(1)
setup(
name="django-auth-abakus",
version='1.1.0',
url='http://github.com/webkom/django-auth-abakus',
author='Webkom, Abakus Linjeforening',
author_email='webkom@abakus.no',
description='A django auth module that can be used to to authenticate '
'users against the API of abakus.no.',
packages=find_packages(exclude='tests'),
install_requires=[
'requests==2.7.0',
],
tests_require=[
'django>=1.4',
'requests==2.7.0',
'responses'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
import re
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
with open('abakus/__init__.py', 'r') as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(),
re.MULTILINE
).group(1)
setup(
name="django-auth-abakus",
version='1.1.0',
url='http://github.com/webkom/django-auth-abakus',
author='Webkom, Abakus Linjeforening',
author_email='webkom@abakus.no',
description='A django auth module that can be used to to authenticate '
'users against the API of abakus.no.',
packages=find_packages(exclude='tests'),
install_requires=[
'requests==2.7.0',
],
tests_require=[
'django>=1.4',
'requests==2.10.0',
'responses'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
Upgrade dependency requests to ==2.10.0
|
Upgrade dependency requests to ==2.10.0
|
Python
|
mit
|
webkom/django-auth-abakus
|
import re
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
with open('abakus/__init__.py', 'r') as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(),
re.MULTILINE
).group(1)
setup(
name="django-auth-abakus",
version='1.1.0',
url='http://github.com/webkom/django-auth-abakus',
author='Webkom, Abakus Linjeforening',
author_email='webkom@abakus.no',
description='A django auth module that can be used to to authenticate '
'users against the API of abakus.no.',
packages=find_packages(exclude='tests'),
install_requires=[
'requests==2.7.0',
],
tests_require=[
'django>=1.4',
'requests==2.7.0',
'responses'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
Upgrade dependency requests to ==2.10.0
|
import re
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
with open('abakus/__init__.py', 'r') as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(),
re.MULTILINE
).group(1)
setup(
name="django-auth-abakus",
version='1.1.0',
url='http://github.com/webkom/django-auth-abakus',
author='Webkom, Abakus Linjeforening',
author_email='webkom@abakus.no',
description='A django auth module that can be used to to authenticate '
'users against the API of abakus.no.',
packages=find_packages(exclude='tests'),
install_requires=[
'requests==2.7.0',
],
tests_require=[
'django>=1.4',
'requests==2.10.0',
'responses'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
<commit_before>import re
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
with open('abakus/__init__.py', 'r') as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(),
re.MULTILINE
).group(1)
setup(
name="django-auth-abakus",
version='1.1.0',
url='http://github.com/webkom/django-auth-abakus',
author='Webkom, Abakus Linjeforening',
author_email='webkom@abakus.no',
description='A django auth module that can be used to to authenticate '
'users against the API of abakus.no.',
packages=find_packages(exclude='tests'),
install_requires=[
'requests==2.7.0',
],
tests_require=[
'django>=1.4',
'requests==2.7.0',
'responses'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
<commit_msg>Upgrade dependency requests to ==2.10.0<commit_after>
|
import re
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
with open('abakus/__init__.py', 'r') as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(),
re.MULTILINE
).group(1)
setup(
name="django-auth-abakus",
version='1.1.0',
url='http://github.com/webkom/django-auth-abakus',
author='Webkom, Abakus Linjeforening',
author_email='webkom@abakus.no',
description='A django auth module that can be used to to authenticate '
'users against the API of abakus.no.',
packages=find_packages(exclude='tests'),
install_requires=[
'requests==2.7.0',
],
tests_require=[
'django>=1.4',
'requests==2.10.0',
'responses'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
import re
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
with open('abakus/__init__.py', 'r') as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(),
re.MULTILINE
).group(1)
setup(
name="django-auth-abakus",
version='1.1.0',
url='http://github.com/webkom/django-auth-abakus',
author='Webkom, Abakus Linjeforening',
author_email='webkom@abakus.no',
description='A django auth module that can be used to to authenticate '
'users against the API of abakus.no.',
packages=find_packages(exclude='tests'),
install_requires=[
'requests==2.7.0',
],
tests_require=[
'django>=1.4',
'requests==2.7.0',
'responses'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
Upgrade dependency requests to ==2.10.0import re
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
with open('abakus/__init__.py', 'r') as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(),
re.MULTILINE
).group(1)
setup(
name="django-auth-abakus",
version='1.1.0',
url='http://github.com/webkom/django-auth-abakus',
author='Webkom, Abakus Linjeforening',
author_email='webkom@abakus.no',
description='A django auth module that can be used to to authenticate '
'users against the API of abakus.no.',
packages=find_packages(exclude='tests'),
install_requires=[
'requests==2.7.0',
],
tests_require=[
'django>=1.4',
'requests==2.10.0',
'responses'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
<commit_before>import re
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
with open('abakus/__init__.py', 'r') as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(),
re.MULTILINE
).group(1)
setup(
name="django-auth-abakus",
version='1.1.0',
url='http://github.com/webkom/django-auth-abakus',
author='Webkom, Abakus Linjeforening',
author_email='webkom@abakus.no',
description='A django auth module that can be used to to authenticate '
'users against the API of abakus.no.',
packages=find_packages(exclude='tests'),
install_requires=[
'requests==2.7.0',
],
tests_require=[
'django>=1.4',
'requests==2.7.0',
'responses'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
<commit_msg>Upgrade dependency requests to ==2.10.0<commit_after>import re
import os
from setuptools import setup, find_packages
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
with open('abakus/__init__.py', 'r') as fd:
version = re.search(
r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(),
re.MULTILINE
).group(1)
setup(
name="django-auth-abakus",
version='1.1.0',
url='http://github.com/webkom/django-auth-abakus',
author='Webkom, Abakus Linjeforening',
author_email='webkom@abakus.no',
description='A django auth module that can be used to to authenticate '
'users against the API of abakus.no.',
packages=find_packages(exclude='tests'),
install_requires=[
'requests==2.7.0',
],
tests_require=[
'django>=1.4',
'requests==2.10.0',
'responses'
],
license='MIT',
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
"Topic :: Software Development :: Libraries :: Python Modules",
"Framework :: Django",
"Environment :: Web Environment",
"Operating System :: OS Independent",
"Natural Language :: English",
]
)
|
04ff7acc10dc722d372db19aaf04296f2a2c63cc
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.lstrip()
if req.startswith('-e ') or req.startswith('http:'):
idx = req.find('#egg=')
if idx >= 0:
req = req[idx + 5:].partition('#')[0].strip()
else:
pass
else:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='train',
version='0.1.0',
author='Kevin L. Mitchell',
author_email='kevin.mitchell@rackspace.com',
url='https://github.com/klmitch/train',
description="Turnstile Benchmarking Tool",
long_description=readfile('README.rst'),
license='Apache License (2.0)',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
packages=['train'],
install_requires=readreq('.requires'),
tests_require=readreq('.test-requires'),
entry_points={
'console_scripts': [
],
},
)
|
#!/usr/bin/env python
import os
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.lstrip()
if req.startswith('-e ') or req.startswith('http:'):
idx = req.find('#egg=')
if idx >= 0:
req = req[idx + 5:].partition('#')[0].strip()
else:
pass
else:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='train',
version='0.1.0',
author='Kevin L. Mitchell',
author_email='kevin.mitchell@rackspace.com',
url='https://github.com/klmitch/train',
description="Turnstile Benchmarking Tool",
long_description=readfile('README.rst'),
license='Apache License (2.0)',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
packages=['train'],
install_requires=readreq('.requires'),
tests_require=readreq('.test-requires'),
entry_points={
'console_scripts': [
'train = train.runner:train.console',
],
},
)
|
Create the console script entrypoint.
|
Create the console script entrypoint.
|
Python
|
apache-2.0
|
klmitch/train
|
#!/usr/bin/env python
import os
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.lstrip()
if req.startswith('-e ') or req.startswith('http:'):
idx = req.find('#egg=')
if idx >= 0:
req = req[idx + 5:].partition('#')[0].strip()
else:
pass
else:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='train',
version='0.1.0',
author='Kevin L. Mitchell',
author_email='kevin.mitchell@rackspace.com',
url='https://github.com/klmitch/train',
description="Turnstile Benchmarking Tool",
long_description=readfile('README.rst'),
license='Apache License (2.0)',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
packages=['train'],
install_requires=readreq('.requires'),
tests_require=readreq('.test-requires'),
entry_points={
'console_scripts': [
],
},
)
Create the console script entrypoint.
|
#!/usr/bin/env python
import os
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.lstrip()
if req.startswith('-e ') or req.startswith('http:'):
idx = req.find('#egg=')
if idx >= 0:
req = req[idx + 5:].partition('#')[0].strip()
else:
pass
else:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='train',
version='0.1.0',
author='Kevin L. Mitchell',
author_email='kevin.mitchell@rackspace.com',
url='https://github.com/klmitch/train',
description="Turnstile Benchmarking Tool",
long_description=readfile('README.rst'),
license='Apache License (2.0)',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
packages=['train'],
install_requires=readreq('.requires'),
tests_require=readreq('.test-requires'),
entry_points={
'console_scripts': [
'train = train.runner:train.console',
],
},
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.lstrip()
if req.startswith('-e ') or req.startswith('http:'):
idx = req.find('#egg=')
if idx >= 0:
req = req[idx + 5:].partition('#')[0].strip()
else:
pass
else:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='train',
version='0.1.0',
author='Kevin L. Mitchell',
author_email='kevin.mitchell@rackspace.com',
url='https://github.com/klmitch/train',
description="Turnstile Benchmarking Tool",
long_description=readfile('README.rst'),
license='Apache License (2.0)',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
packages=['train'],
install_requires=readreq('.requires'),
tests_require=readreq('.test-requires'),
entry_points={
'console_scripts': [
],
},
)
<commit_msg>Create the console script entrypoint.<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.lstrip()
if req.startswith('-e ') or req.startswith('http:'):
idx = req.find('#egg=')
if idx >= 0:
req = req[idx + 5:].partition('#')[0].strip()
else:
pass
else:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='train',
version='0.1.0',
author='Kevin L. Mitchell',
author_email='kevin.mitchell@rackspace.com',
url='https://github.com/klmitch/train',
description="Turnstile Benchmarking Tool",
long_description=readfile('README.rst'),
license='Apache License (2.0)',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
packages=['train'],
install_requires=readreq('.requires'),
tests_require=readreq('.test-requires'),
entry_points={
'console_scripts': [
'train = train.runner:train.console',
],
},
)
|
#!/usr/bin/env python
import os
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.lstrip()
if req.startswith('-e ') or req.startswith('http:'):
idx = req.find('#egg=')
if idx >= 0:
req = req[idx + 5:].partition('#')[0].strip()
else:
pass
else:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='train',
version='0.1.0',
author='Kevin L. Mitchell',
author_email='kevin.mitchell@rackspace.com',
url='https://github.com/klmitch/train',
description="Turnstile Benchmarking Tool",
long_description=readfile('README.rst'),
license='Apache License (2.0)',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
packages=['train'],
install_requires=readreq('.requires'),
tests_require=readreq('.test-requires'),
entry_points={
'console_scripts': [
],
},
)
Create the console script entrypoint.#!/usr/bin/env python
import os
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.lstrip()
if req.startswith('-e ') or req.startswith('http:'):
idx = req.find('#egg=')
if idx >= 0:
req = req[idx + 5:].partition('#')[0].strip()
else:
pass
else:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='train',
version='0.1.0',
author='Kevin L. Mitchell',
author_email='kevin.mitchell@rackspace.com',
url='https://github.com/klmitch/train',
description="Turnstile Benchmarking Tool",
long_description=readfile('README.rst'),
license='Apache License (2.0)',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
packages=['train'],
install_requires=readreq('.requires'),
tests_require=readreq('.test-requires'),
entry_points={
'console_scripts': [
'train = train.runner:train.console',
],
},
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.lstrip()
if req.startswith('-e ') or req.startswith('http:'):
idx = req.find('#egg=')
if idx >= 0:
req = req[idx + 5:].partition('#')[0].strip()
else:
pass
else:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='train',
version='0.1.0',
author='Kevin L. Mitchell',
author_email='kevin.mitchell@rackspace.com',
url='https://github.com/klmitch/train',
description="Turnstile Benchmarking Tool",
long_description=readfile('README.rst'),
license='Apache License (2.0)',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
packages=['train'],
install_requires=readreq('.requires'),
tests_require=readreq('.test-requires'),
entry_points={
'console_scripts': [
],
},
)
<commit_msg>Create the console script entrypoint.<commit_after>#!/usr/bin/env python
import os
from setuptools import setup
def readreq(filename):
result = []
with open(filename) as f:
for req in f:
req = req.lstrip()
if req.startswith('-e ') or req.startswith('http:'):
idx = req.find('#egg=')
if idx >= 0:
req = req[idx + 5:].partition('#')[0].strip()
else:
pass
else:
req = req.partition('#')[0].strip()
if not req:
continue
result.append(req)
return result
def readfile(filename):
with open(filename) as f:
return f.read()
setup(
name='train',
version='0.1.0',
author='Kevin L. Mitchell',
author_email='kevin.mitchell@rackspace.com',
url='https://github.com/klmitch/train',
description="Turnstile Benchmarking Tool",
long_description=readfile('README.rst'),
license='Apache License (2.0)',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
],
packages=['train'],
install_requires=readreq('.requires'),
tests_require=readreq('.test-requires'),
entry_points={
'console_scripts': [
'train = train.runner:train.console',
],
},
)
|
2ed7e3af5f6ab586dbad33e566f61a0a3c3ff61b
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""Setup script for PythonTemplateDemo."""
import setuptools
from demo import __project__, __version__
try:
README = open("README.rst").read()
CHANGELOG = open("CHANGELOG.rst").read()
except IOError:
DESCRIPTION = "<placeholder>"
else:
DESCRIPTION = README + '\n' + CHANGELOG
setuptools.setup(
name=__project__,
version=__version__,
description="A sample project templated from jacebrowning/template-python.",
url='https://github.com/jacebrowning/template-python-demo',
author='Jace Browning',
author_email='jacebrowning@gmail.com',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=(DESCRIPTION),
license='MIT',
classifiers=[
# TODO: update this list to match your application: https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
|
#!/usr/bin/env python
"""Setup script for PythonTemplateDemo."""
import setuptools
from demo import __project__, __version__
try:
README = open("README.rst").read()
CHANGELOG = open("CHANGELOG.rst").read()
except IOError:
LONG_DESCRIPTION = "Coming soon..."
else:
LONG_DESCRIPTION = README + '\n' + CHANGELOG
setuptools.setup(
name=__project__,
version=__version__,
description="A sample project templated from jacebrowning/template-python.",
url='https://github.com/jacebrowning/template-python-demo',
author='Jace Browning',
author_email='jacebrowning@gmail.com',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=LONG_DESCRIPTION,
license='MIT',
classifiers=[
# TODO: update this list to match your application: https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
|
Deploy Travis CI build 715 to GitHub
|
Deploy Travis CI build 715 to GitHub
|
Python
|
mit
|
jacebrowning/template-python-demo
|
#!/usr/bin/env python
"""Setup script for PythonTemplateDemo."""
import setuptools
from demo import __project__, __version__
try:
README = open("README.rst").read()
CHANGELOG = open("CHANGELOG.rst").read()
except IOError:
DESCRIPTION = "<placeholder>"
else:
DESCRIPTION = README + '\n' + CHANGELOG
setuptools.setup(
name=__project__,
version=__version__,
description="A sample project templated from jacebrowning/template-python.",
url='https://github.com/jacebrowning/template-python-demo',
author='Jace Browning',
author_email='jacebrowning@gmail.com',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=(DESCRIPTION),
license='MIT',
classifiers=[
# TODO: update this list to match your application: https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
Deploy Travis CI build 715 to GitHub
|
#!/usr/bin/env python
"""Setup script for PythonTemplateDemo."""
import setuptools
from demo import __project__, __version__
try:
README = open("README.rst").read()
CHANGELOG = open("CHANGELOG.rst").read()
except IOError:
LONG_DESCRIPTION = "Coming soon..."
else:
LONG_DESCRIPTION = README + '\n' + CHANGELOG
setuptools.setup(
name=__project__,
version=__version__,
description="A sample project templated from jacebrowning/template-python.",
url='https://github.com/jacebrowning/template-python-demo',
author='Jace Browning',
author_email='jacebrowning@gmail.com',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=LONG_DESCRIPTION,
license='MIT',
classifiers=[
# TODO: update this list to match your application: https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
|
<commit_before>#!/usr/bin/env python
"""Setup script for PythonTemplateDemo."""
import setuptools
from demo import __project__, __version__
try:
README = open("README.rst").read()
CHANGELOG = open("CHANGELOG.rst").read()
except IOError:
DESCRIPTION = "<placeholder>"
else:
DESCRIPTION = README + '\n' + CHANGELOG
setuptools.setup(
name=__project__,
version=__version__,
description="A sample project templated from jacebrowning/template-python.",
url='https://github.com/jacebrowning/template-python-demo',
author='Jace Browning',
author_email='jacebrowning@gmail.com',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=(DESCRIPTION),
license='MIT',
classifiers=[
# TODO: update this list to match your application: https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
<commit_msg>Deploy Travis CI build 715 to GitHub<commit_after>
|
#!/usr/bin/env python
"""Setup script for PythonTemplateDemo."""
import setuptools
from demo import __project__, __version__
try:
README = open("README.rst").read()
CHANGELOG = open("CHANGELOG.rst").read()
except IOError:
LONG_DESCRIPTION = "Coming soon..."
else:
LONG_DESCRIPTION = README + '\n' + CHANGELOG
setuptools.setup(
name=__project__,
version=__version__,
description="A sample project templated from jacebrowning/template-python.",
url='https://github.com/jacebrowning/template-python-demo',
author='Jace Browning',
author_email='jacebrowning@gmail.com',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=LONG_DESCRIPTION,
license='MIT',
classifiers=[
# TODO: update this list to match your application: https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
|
#!/usr/bin/env python
"""Setup script for PythonTemplateDemo."""
import setuptools
from demo import __project__, __version__
try:
README = open("README.rst").read()
CHANGELOG = open("CHANGELOG.rst").read()
except IOError:
DESCRIPTION = "<placeholder>"
else:
DESCRIPTION = README + '\n' + CHANGELOG
setuptools.setup(
name=__project__,
version=__version__,
description="A sample project templated from jacebrowning/template-python.",
url='https://github.com/jacebrowning/template-python-demo',
author='Jace Browning',
author_email='jacebrowning@gmail.com',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=(DESCRIPTION),
license='MIT',
classifiers=[
# TODO: update this list to match your application: https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
Deploy Travis CI build 715 to GitHub#!/usr/bin/env python
"""Setup script for PythonTemplateDemo."""
import setuptools
from demo import __project__, __version__
try:
README = open("README.rst").read()
CHANGELOG = open("CHANGELOG.rst").read()
except IOError:
LONG_DESCRIPTION = "Coming soon..."
else:
LONG_DESCRIPTION = README + '\n' + CHANGELOG
setuptools.setup(
name=__project__,
version=__version__,
description="A sample project templated from jacebrowning/template-python.",
url='https://github.com/jacebrowning/template-python-demo',
author='Jace Browning',
author_email='jacebrowning@gmail.com',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=LONG_DESCRIPTION,
license='MIT',
classifiers=[
# TODO: update this list to match your application: https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
|
<commit_before>#!/usr/bin/env python
"""Setup script for PythonTemplateDemo."""
import setuptools
from demo import __project__, __version__
try:
README = open("README.rst").read()
CHANGELOG = open("CHANGELOG.rst").read()
except IOError:
DESCRIPTION = "<placeholder>"
else:
DESCRIPTION = README + '\n' + CHANGELOG
setuptools.setup(
name=__project__,
version=__version__,
description="A sample project templated from jacebrowning/template-python.",
url='https://github.com/jacebrowning/template-python-demo',
author='Jace Browning',
author_email='jacebrowning@gmail.com',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=(DESCRIPTION),
license='MIT',
classifiers=[
# TODO: update this list to match your application: https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
<commit_msg>Deploy Travis CI build 715 to GitHub<commit_after>#!/usr/bin/env python
"""Setup script for PythonTemplateDemo."""
import setuptools
from demo import __project__, __version__
try:
README = open("README.rst").read()
CHANGELOG = open("CHANGELOG.rst").read()
except IOError:
LONG_DESCRIPTION = "Coming soon..."
else:
LONG_DESCRIPTION = README + '\n' + CHANGELOG
setuptools.setup(
name=__project__,
version=__version__,
description="A sample project templated from jacebrowning/template-python.",
url='https://github.com/jacebrowning/template-python-demo',
author='Jace Browning',
author_email='jacebrowning@gmail.com',
packages=setuptools.find_packages(),
entry_points={'console_scripts': []},
long_description=LONG_DESCRIPTION,
license='MIT',
classifiers=[
# TODO: update this list to match your application: https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
install_requires=open("requirements.txt").readlines(),
)
|
c03fb3e27b4d526916192739ac6879f186f4d749
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
extras_require={
'dev': [
'pytest'
]
}
)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
extras_require={
'dev': [
'pytest',
'pytest-cov'
]
}
)
|
Add the py.test coverage plugin package (pytest-cov) as an extra dependency.
|
Add the py.test coverage plugin package (pytest-cov) as an extra
dependency.
|
Python
|
mit
|
TkTech/Jawa,TkTech/Jawa
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
extras_require={
'dev': [
'pytest'
]
}
)
Add the py.test coverage plugin package (pytest-cov) as an extra
dependency.
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
extras_require={
'dev': [
'pytest',
'pytest-cov'
]
}
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
extras_require={
'dev': [
'pytest'
]
}
)
<commit_msg>Add the py.test coverage plugin package (pytest-cov) as an extra
dependency.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
extras_require={
'dev': [
'pytest',
'pytest-cov'
]
}
)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
extras_require={
'dev': [
'pytest'
]
}
)
Add the py.test coverage plugin package (pytest-cov) as an extra
dependency.#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
extras_require={
'dev': [
'pytest',
'pytest-cov'
]
}
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
extras_require={
'dev': [
'pytest'
]
}
)
<commit_msg>Add the py.test coverage plugin package (pytest-cov) as an extra
dependency.<commit_after>#!/usr/bin/env python
# -*- coding: utf8 -*-
from setuptools import setup, find_packages
setup(
name="jawa",
packages=find_packages(),
version="1.0",
description="Doing fun stuff with JVM ClassFiles.",
author="Tyler Kennedy",
author_email="tk@tkte.ch",
url="http://github.com/TkTech/Jawa",
keywords=["java", "disassembly", "disassembler"],
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Disassemblers"
],
extras_require={
'dev': [
'pytest',
'pytest-cov'
]
}
)
|
af406d09ded10dbdfcc1d752f1cfe1d4bfe8f7d5
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.1.0',
description = 'OGC Web Service utility library',
license = 'GPL',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.2.0',
description = 'OGC Web Service utility library',
license = 'BSD',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
Change version and license for 0.2
|
Change version and license for 0.2
git-svn-id: 150a648d6f30c8fc6b9d405c0558dface314bbdd@617 b426a367-1105-0410-b9ff-cdf4ab011145
|
Python
|
bsd-3-clause
|
daf/OWSLib,datagovuk/OWSLib,kalxas/OWSLib,bird-house/OWSLib,kwilcox/OWSLib,datagovuk/OWSLib,KeyproOy/OWSLib,Jenselme/OWSLib,b-cube/OWSLib,QuLogic/OWSLib,robmcmullen/OWSLib,geopython/OWSLib,daf/OWSLib,jaygoldfinch/OWSLib,mbertrand/OWSLib,ocefpaf/OWSLib,tomkralidis/OWSLib,dblodgett-usgs/OWSLib,JuergenWeichand/OWSLib,daf/OWSLib,geographika/OWSLib,jachym/OWSLib,menegon/OWSLib,datagovuk/OWSLib,jaygoldfinch/OWSLib,gfusca/OWSLib
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.1.0',
description = 'OGC Web Service utility library',
license = 'GPL',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
Change version and license for 0.2
git-svn-id: 150a648d6f30c8fc6b9d405c0558dface314bbdd@617 b426a367-1105-0410-b9ff-cdf4ab011145
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.2.0',
description = 'OGC Web Service utility library',
license = 'BSD',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
<commit_before>
from setuptools import setup
setup(name = 'OWSLib',
version = '0.1.0',
description = 'OGC Web Service utility library',
license = 'GPL',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
<commit_msg>Change version and license for 0.2
git-svn-id: 150a648d6f30c8fc6b9d405c0558dface314bbdd@617 b426a367-1105-0410-b9ff-cdf4ab011145<commit_after>
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.2.0',
description = 'OGC Web Service utility library',
license = 'BSD',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
from setuptools import setup
setup(name = 'OWSLib',
version = '0.1.0',
description = 'OGC Web Service utility library',
license = 'GPL',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
Change version and license for 0.2
git-svn-id: 150a648d6f30c8fc6b9d405c0558dface314bbdd@617 b426a367-1105-0410-b9ff-cdf4ab011145
from setuptools import setup
setup(name = 'OWSLib',
version = '0.2.0',
description = 'OGC Web Service utility library',
license = 'BSD',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
<commit_before>
from setuptools import setup
setup(name = 'OWSLib',
version = '0.1.0',
description = 'OGC Web Service utility library',
license = 'GPL',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
<commit_msg>Change version and license for 0.2
git-svn-id: 150a648d6f30c8fc6b9d405c0558dface314bbdd@617 b426a367-1105-0410-b9ff-cdf4ab011145<commit_after>
from setuptools import setup
setup(name = 'OWSLib',
version = '0.2.0',
description = 'OGC Web Service utility library',
license = 'BSD',
keywords = 'gis ogc ows wfs wms capabilities metadata',
author = 'Sean Gillies',
author_email = 'sgillies@frii.com',
maintainer = 'Sean Gillies',
maintainer_email = 'sgillies@frii.com',
url = 'http://trac.gispython.org/projects/PCL/wiki/OwsLib',
packages = ['owslib'],
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering :: GIS',
],
)
|
9c9a2a36a5abbd166b9f67a6927447c14d2264ca
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='diadzine@gmail.com',
install_requires=[
'Django<1.8',
'django-hvad',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='diadzine@gmail.com',
install_requires=[
'Django',
'django-hvad<=1.0.0',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
Fix django-hvad version in requirements
|
Fix django-hvad version in requirements
|
Python
|
bsd-3-clause
|
diadzine/django-simple-multilingual-survey,diadzine/django-simple-multilingual-survey
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='diadzine@gmail.com',
install_requires=[
'Django<1.8',
'django-hvad',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Fix django-hvad version in requirements
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='diadzine@gmail.com',
install_requires=[
'Django',
'django-hvad<=1.0.0',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='diadzine@gmail.com',
install_requires=[
'Django<1.8',
'django-hvad',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Fix django-hvad version in requirements<commit_after>
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='diadzine@gmail.com',
install_requires=[
'Django',
'django-hvad<=1.0.0',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='diadzine@gmail.com',
install_requires=[
'Django<1.8',
'django-hvad',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Fix django-hvad version in requirementsimport os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='diadzine@gmail.com',
install_requires=[
'Django',
'django-hvad<=1.0.0',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='diadzine@gmail.com',
install_requires=[
'Django<1.8',
'django-hvad',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Fix django-hvad version in requirements<commit_after>import os
from setuptools import setup, find_packages
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
import multilingual_survey
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-multilingual-survey',
version=multilingual_survey.__version__,
packages=find_packages(),
include_package_data=True,
license='BSD License', # example license
description='A simple Django app to conduct Web-based multilingual surveys.',
long_description=README,
url='https://github.com/diadzine/django-multilingual-survey',
author='Aymeric Bringard',
author_email='diadzine@gmail.com',
install_requires=[
'Django',
'django-hvad<=1.0.0',
],
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
1f9bc1b6f9a796458d104c01b9a344cbb0c84a9b
|
Lib/fontParts/fontshell/groups.py
|
Lib/fontParts/fontshell/groups.py
|
import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
|
import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _get_base_side1KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide1Groups")
def _get_base_side2KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide2Groups")
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
|
Add defcon implementation of group lookup methods.
|
Add defcon implementation of group lookup methods.
|
Python
|
mit
|
robofab-developers/fontParts,robofab-developers/fontParts
|
import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
Add defcon implementation of group lookup methods.
|
import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _get_base_side1KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide1Groups")
def _get_base_side2KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide2Groups")
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
|
<commit_before>import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
<commit_msg>Add defcon implementation of group lookup methods.<commit_after>
|
import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _get_base_side1KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide1Groups")
def _get_base_side2KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide2Groups")
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
|
import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
Add defcon implementation of group lookup methods.import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _get_base_side1KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide1Groups")
def _get_base_side2KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide2Groups")
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
|
<commit_before>import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
<commit_msg>Add defcon implementation of group lookup methods.<commit_after>import defcon
from fontParts.base import BaseGroups
from fontParts.fontshell.base import RBaseObject
class RGroups(RBaseObject, BaseGroups):
wrapClass = defcon.Groups
def _get_base_side1KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide1Groups")
def _get_base_side2KerningGroups(self):
return self.naked().getRepresentation("defcon.groups.kerningSide2Groups")
def _items(self):
return self.naked().items()
def _contains(self, key):
return key in self.naked()
def _setItem(self, key, value):
self.naked()[key] = value
def _getItem(self, key):
return self.naked()[key]
def _delItem(self, key):
del self.naked()[key]
|
98fdc28d0c506e7fb44648256a56e7978d111073
|
tests/base.py
|
tests/base.py
|
import sys
import unittest
#sys.path.insert(0, os.path.abspath('..'))
import github3
class BaseTest(unittest.TestCase):
api = 'https://api.github.com/'
kr = 'kennethreitz'
sigm = 'sigmavirus24'
todo = 'Todo.txt-python'
gh3py = 'github3py'
def setUp(self):
super(BaseTest, self).setUp()
self.g = github3.GitHub()
def assertIsInstance(self, obj, cls):
"""Assert that ``obj`` is an instance of ``cls``"""
if not isinstance(obj, cls):
self.fail()
def assertRaisesError(self, func, *args, **kwargs):
"""Assert that func raises github3.Error"""
try:
func(*args, **kwargs)
except github3.Error:
pass
except Exception, e:
self.fail('{0}({1}, {2}) raises unexpected exception: {3}'.format(
str(func), str(args), str(kwargs), str(e)))
def assertIsNotNone(self, value, msg=None):
if sys.version_info >= (2, 7):
super(BaseTest, self).assertIsNotNone(value, msg)
else:
try:
assert value is not None
except AssertionError:
self.fail('AssertionError: ' + msg)
|
import sys
import unittest
#sys.path.insert(0, os.path.abspath('..'))
import github3
class BaseTest(unittest.TestCase):
api = 'https://api.github.com/'
kr = 'kennethreitz'
sigm = 'sigmavirus24'
todo = 'Todo.txt-python'
gh3py = 'github3py'
def setUp(self):
super(BaseTest, self).setUp()
self.g = github3.GitHub()
def assertIsInstance(self, obj, cls):
"""Assert that ``obj`` is an instance of ``cls``"""
if not isinstance(obj, cls):
self.fail()
def assertRaisesError(self, func, *args, **kwargs):
"""Assert that func raises github3.Error"""
try:
func(*args, **kwargs)
except github3.Error:
pass
except Exception as e:
self.fail('{0}({1}, {2}) raises unexpected exception: {3}'.format(
str(func), str(args), str(kwargs), str(e)))
def assertIsNotNone(self, value, msg=None):
if sys.version_info >= (2, 7):
super(BaseTest, self).assertIsNotNone(value, msg)
else:
try:
assert value is not None
except AssertionError:
self.fail('AssertionError: ' + msg)
|
Fix exception test function for python3.
|
Fix exception test function for python3.
|
Python
|
bsd-3-clause
|
agamdua/github3.py,christophelec/github3.py,degustaf/github3.py,jim-minter/github3.py,icio/github3.py,itsmemattchung/github3.py,balloob/github3.py,sigmavirus24/github3.py,h4ck3rm1k3/github3.py,wbrefvem/github3.py,krxsky/github3.py,ueg1990/github3.py
|
import sys
import unittest
#sys.path.insert(0, os.path.abspath('..'))
import github3
class BaseTest(unittest.TestCase):
api = 'https://api.github.com/'
kr = 'kennethreitz'
sigm = 'sigmavirus24'
todo = 'Todo.txt-python'
gh3py = 'github3py'
def setUp(self):
super(BaseTest, self).setUp()
self.g = github3.GitHub()
def assertIsInstance(self, obj, cls):
"""Assert that ``obj`` is an instance of ``cls``"""
if not isinstance(obj, cls):
self.fail()
def assertRaisesError(self, func, *args, **kwargs):
"""Assert that func raises github3.Error"""
try:
func(*args, **kwargs)
except github3.Error:
pass
except Exception, e:
self.fail('{0}({1}, {2}) raises unexpected exception: {3}'.format(
str(func), str(args), str(kwargs), str(e)))
def assertIsNotNone(self, value, msg=None):
if sys.version_info >= (2, 7):
super(BaseTest, self).assertIsNotNone(value, msg)
else:
try:
assert value is not None
except AssertionError:
self.fail('AssertionError: ' + msg)
Fix exception test function for python3.
|
import sys
import unittest
#sys.path.insert(0, os.path.abspath('..'))
import github3
class BaseTest(unittest.TestCase):
api = 'https://api.github.com/'
kr = 'kennethreitz'
sigm = 'sigmavirus24'
todo = 'Todo.txt-python'
gh3py = 'github3py'
def setUp(self):
super(BaseTest, self).setUp()
self.g = github3.GitHub()
def assertIsInstance(self, obj, cls):
"""Assert that ``obj`` is an instance of ``cls``"""
if not isinstance(obj, cls):
self.fail()
def assertRaisesError(self, func, *args, **kwargs):
"""Assert that func raises github3.Error"""
try:
func(*args, **kwargs)
except github3.Error:
pass
except Exception as e:
self.fail('{0}({1}, {2}) raises unexpected exception: {3}'.format(
str(func), str(args), str(kwargs), str(e)))
def assertIsNotNone(self, value, msg=None):
if sys.version_info >= (2, 7):
super(BaseTest, self).assertIsNotNone(value, msg)
else:
try:
assert value is not None
except AssertionError:
self.fail('AssertionError: ' + msg)
|
<commit_before>import sys
import unittest
#sys.path.insert(0, os.path.abspath('..'))
import github3
class BaseTest(unittest.TestCase):
api = 'https://api.github.com/'
kr = 'kennethreitz'
sigm = 'sigmavirus24'
todo = 'Todo.txt-python'
gh3py = 'github3py'
def setUp(self):
super(BaseTest, self).setUp()
self.g = github3.GitHub()
def assertIsInstance(self, obj, cls):
"""Assert that ``obj`` is an instance of ``cls``"""
if not isinstance(obj, cls):
self.fail()
def assertRaisesError(self, func, *args, **kwargs):
"""Assert that func raises github3.Error"""
try:
func(*args, **kwargs)
except github3.Error:
pass
except Exception, e:
self.fail('{0}({1}, {2}) raises unexpected exception: {3}'.format(
str(func), str(args), str(kwargs), str(e)))
def assertIsNotNone(self, value, msg=None):
if sys.version_info >= (2, 7):
super(BaseTest, self).assertIsNotNone(value, msg)
else:
try:
assert value is not None
except AssertionError:
self.fail('AssertionError: ' + msg)
<commit_msg>Fix exception test function for python3.<commit_after>
|
import sys
import unittest
#sys.path.insert(0, os.path.abspath('..'))
import github3
class BaseTest(unittest.TestCase):
api = 'https://api.github.com/'
kr = 'kennethreitz'
sigm = 'sigmavirus24'
todo = 'Todo.txt-python'
gh3py = 'github3py'
def setUp(self):
super(BaseTest, self).setUp()
self.g = github3.GitHub()
def assertIsInstance(self, obj, cls):
"""Assert that ``obj`` is an instance of ``cls``"""
if not isinstance(obj, cls):
self.fail()
def assertRaisesError(self, func, *args, **kwargs):
"""Assert that func raises github3.Error"""
try:
func(*args, **kwargs)
except github3.Error:
pass
except Exception as e:
self.fail('{0}({1}, {2}) raises unexpected exception: {3}'.format(
str(func), str(args), str(kwargs), str(e)))
def assertIsNotNone(self, value, msg=None):
if sys.version_info >= (2, 7):
super(BaseTest, self).assertIsNotNone(value, msg)
else:
try:
assert value is not None
except AssertionError:
self.fail('AssertionError: ' + msg)
|
import sys
import unittest
#sys.path.insert(0, os.path.abspath('..'))
import github3
class BaseTest(unittest.TestCase):
api = 'https://api.github.com/'
kr = 'kennethreitz'
sigm = 'sigmavirus24'
todo = 'Todo.txt-python'
gh3py = 'github3py'
def setUp(self):
super(BaseTest, self).setUp()
self.g = github3.GitHub()
def assertIsInstance(self, obj, cls):
"""Assert that ``obj`` is an instance of ``cls``"""
if not isinstance(obj, cls):
self.fail()
def assertRaisesError(self, func, *args, **kwargs):
"""Assert that func raises github3.Error"""
try:
func(*args, **kwargs)
except github3.Error:
pass
except Exception, e:
self.fail('{0}({1}, {2}) raises unexpected exception: {3}'.format(
str(func), str(args), str(kwargs), str(e)))
def assertIsNotNone(self, value, msg=None):
if sys.version_info >= (2, 7):
super(BaseTest, self).assertIsNotNone(value, msg)
else:
try:
assert value is not None
except AssertionError:
self.fail('AssertionError: ' + msg)
Fix exception test function for python3.import sys
import unittest
#sys.path.insert(0, os.path.abspath('..'))
import github3
class BaseTest(unittest.TestCase):
api = 'https://api.github.com/'
kr = 'kennethreitz'
sigm = 'sigmavirus24'
todo = 'Todo.txt-python'
gh3py = 'github3py'
def setUp(self):
super(BaseTest, self).setUp()
self.g = github3.GitHub()
def assertIsInstance(self, obj, cls):
"""Assert that ``obj`` is an instance of ``cls``"""
if not isinstance(obj, cls):
self.fail()
def assertRaisesError(self, func, *args, **kwargs):
"""Assert that func raises github3.Error"""
try:
func(*args, **kwargs)
except github3.Error:
pass
except Exception as e:
self.fail('{0}({1}, {2}) raises unexpected exception: {3}'.format(
str(func), str(args), str(kwargs), str(e)))
def assertIsNotNone(self, value, msg=None):
if sys.version_info >= (2, 7):
super(BaseTest, self).assertIsNotNone(value, msg)
else:
try:
assert value is not None
except AssertionError:
self.fail('AssertionError: ' + msg)
|
<commit_before>import sys
import unittest
#sys.path.insert(0, os.path.abspath('..'))
import github3
class BaseTest(unittest.TestCase):
api = 'https://api.github.com/'
kr = 'kennethreitz'
sigm = 'sigmavirus24'
todo = 'Todo.txt-python'
gh3py = 'github3py'
def setUp(self):
super(BaseTest, self).setUp()
self.g = github3.GitHub()
def assertIsInstance(self, obj, cls):
"""Assert that ``obj`` is an instance of ``cls``"""
if not isinstance(obj, cls):
self.fail()
def assertRaisesError(self, func, *args, **kwargs):
"""Assert that func raises github3.Error"""
try:
func(*args, **kwargs)
except github3.Error:
pass
except Exception, e:
self.fail('{0}({1}, {2}) raises unexpected exception: {3}'.format(
str(func), str(args), str(kwargs), str(e)))
def assertIsNotNone(self, value, msg=None):
if sys.version_info >= (2, 7):
super(BaseTest, self).assertIsNotNone(value, msg)
else:
try:
assert value is not None
except AssertionError:
self.fail('AssertionError: ' + msg)
<commit_msg>Fix exception test function for python3.<commit_after>import sys
import unittest
#sys.path.insert(0, os.path.abspath('..'))
import github3
class BaseTest(unittest.TestCase):
api = 'https://api.github.com/'
kr = 'kennethreitz'
sigm = 'sigmavirus24'
todo = 'Todo.txt-python'
gh3py = 'github3py'
def setUp(self):
super(BaseTest, self).setUp()
self.g = github3.GitHub()
def assertIsInstance(self, obj, cls):
"""Assert that ``obj`` is an instance of ``cls``"""
if not isinstance(obj, cls):
self.fail()
def assertRaisesError(self, func, *args, **kwargs):
"""Assert that func raises github3.Error"""
try:
func(*args, **kwargs)
except github3.Error:
pass
except Exception as e:
self.fail('{0}({1}, {2}) raises unexpected exception: {3}'.format(
str(func), str(args), str(kwargs), str(e)))
def assertIsNotNone(self, value, msg=None):
if sys.version_info >= (2, 7):
super(BaseTest, self).assertIsNotNone(value, msg)
else:
try:
assert value is not None
except AssertionError:
self.fail('AssertionError: ' + msg)
|
fbfd656d0c11bfbc6500fcdffdfae422ab50a08f
|
lancet/contrib/dploi.py
|
lancet/contrib/dploi.py
|
import click
@click.command()
@click.argument('environment')
@click.pass_obj
def ssh(lancet, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
lancet.defer_to_shell('ssh', '-p', str(config.get('port', 20)), host)
|
from shlex import quote
import click
@click.command()
@click.option('-p', '--print/--exec', 'print_cmd', default=False,
help='Print the command instead of executing it.')
@click.argument('environment')
@click.pass_obj
def ssh(lancet, print_cmd, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
cmd = ['ssh', '-p', str(config.get('port', 20)), host]
if print_cmd:
click.echo(' '.join(quote(s) for s in cmd))
else:
lancet.defer_to_shell(*cmd)
|
Allow to print the ssh command
|
Allow to print the ssh command
|
Python
|
mit
|
GaretJax/lancet,GaretJax/lancet
|
import click
@click.command()
@click.argument('environment')
@click.pass_obj
def ssh(lancet, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
lancet.defer_to_shell('ssh', '-p', str(config.get('port', 20)), host)
Allow to print the ssh command
|
from shlex import quote
import click
@click.command()
@click.option('-p', '--print/--exec', 'print_cmd', default=False,
help='Print the command instead of executing it.')
@click.argument('environment')
@click.pass_obj
def ssh(lancet, print_cmd, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
cmd = ['ssh', '-p', str(config.get('port', 20)), host]
if print_cmd:
click.echo(' '.join(quote(s) for s in cmd))
else:
lancet.defer_to_shell(*cmd)
|
<commit_before>import click
@click.command()
@click.argument('environment')
@click.pass_obj
def ssh(lancet, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
lancet.defer_to_shell('ssh', '-p', str(config.get('port', 20)), host)
<commit_msg>Allow to print the ssh command<commit_after>
|
from shlex import quote
import click
@click.command()
@click.option('-p', '--print/--exec', 'print_cmd', default=False,
help='Print the command instead of executing it.')
@click.argument('environment')
@click.pass_obj
def ssh(lancet, print_cmd, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
cmd = ['ssh', '-p', str(config.get('port', 20)), host]
if print_cmd:
click.echo(' '.join(quote(s) for s in cmd))
else:
lancet.defer_to_shell(*cmd)
|
import click
@click.command()
@click.argument('environment')
@click.pass_obj
def ssh(lancet, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
lancet.defer_to_shell('ssh', '-p', str(config.get('port', 20)), host)
Allow to print the ssh commandfrom shlex import quote
import click
@click.command()
@click.option('-p', '--print/--exec', 'print_cmd', default=False,
help='Print the command instead of executing it.')
@click.argument('environment')
@click.pass_obj
def ssh(lancet, print_cmd, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
cmd = ['ssh', '-p', str(config.get('port', 20)), host]
if print_cmd:
click.echo(' '.join(quote(s) for s in cmd))
else:
lancet.defer_to_shell(*cmd)
|
<commit_before>import click
@click.command()
@click.argument('environment')
@click.pass_obj
def ssh(lancet, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
lancet.defer_to_shell('ssh', '-p', str(config.get('port', 20)), host)
<commit_msg>Allow to print the ssh command<commit_after>from shlex import quote
import click
@click.command()
@click.option('-p', '--print/--exec', 'print_cmd', default=False,
help='Print the command instead of executing it.')
@click.argument('environment')
@click.pass_obj
def ssh(lancet, print_cmd, environment):
"""
SSH into the given environment, based on the dploi configuration.
"""
namespace = {}
with open('deployment.py') as fh:
code = compile(fh.read(), 'deployment.py', 'exec')
exec(code, {}, namespace)
config = namespace['settings'][environment]
host = '{}@{}'.format(config['user'], config['hosts'][0])
cmd = ['ssh', '-p', str(config.get('port', 20)), host]
if print_cmd:
click.echo(' '.join(quote(s) for s in cmd))
else:
lancet.defer_to_shell(*cmd)
|
d03657217cfd019bb55a4895a4cc6b0a80068ff0
|
bluebottle/bb_projects/migrations/0003_auto_20160815_1658.py
|
bluebottle/bb_projects/migrations/0003_auto_20160815_1658.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
try:
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
except ProjectPhase.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
|
Make the status data migration optional
|
Make the status data migration optional
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
Make the status data migration optional
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
try:
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
except ProjectPhase.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
<commit_msg>Make the status data migration optional<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
try:
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
except ProjectPhase.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
Make the status data migration optional# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
try:
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
except ProjectPhase.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
<commit_msg>Make the status data migration optional<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-08-15 14:58
from __future__ import unicode_literals
from django.db import migrations
def update_status_names(apps, schema_editor):
ProjectPhase = apps.get_model('bb_projects', 'ProjectPhase')
updates = {
'plan-new': 'Plan - Draft',
'voting': 'Voting - Running',
'campaign': 'Project - Running',
'done-complete': 'Project - Realised',
'done-incomplete': 'Project - Done',
'closed': 'Rejected / Cancelled'
}
for slug, new_name in updates.items():
try:
phase = ProjectPhase.objects.get(slug=slug)
phase.name = new_name
phase.save()
except ProjectPhase.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('bb_projects', '0002_remove_projecttheme_name_nl'),
]
operations = [
migrations.RunPython(update_status_names)
]
|
12c833b1097579ca4a0162dca0d789b787f7d237
|
oscar/core/compat.py
|
oscar/core/compat.py
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in forieng key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
|
Add two settings related to custom user model
|
Add two settings related to custom user model
|
Python
|
bsd-3-clause
|
josesanch/django-oscar,binarydud/django-oscar,Idematica/django-oscar,QLGu/django-oscar,dongguangming/django-oscar,kapari/django-oscar,jmt4/django-oscar,amirrpp/django-oscar,WadeYuChen/django-oscar,rocopartners/django-oscar,rocopartners/django-oscar,amirrpp/django-oscar,ka7eh/django-oscar,ahmetdaglarbas/e-commerce,jinnykoo/christmas,michaelkuty/django-oscar,django-oscar/django-oscar,saadatqadri/django-oscar,jinnykoo/christmas,michaelkuty/django-oscar,monikasulik/django-oscar,pdonadeo/django-oscar,pasqualguerrero/django-oscar,QLGu/django-oscar,kapari/django-oscar,pasqualguerrero/django-oscar,vovanbo/django-oscar,eddiep1101/django-oscar,saadatqadri/django-oscar,faratro/django-oscar,ka7eh/django-oscar,spartonia/django-oscar,ka7eh/django-oscar,WillisXChen/django-oscar,rocopartners/django-oscar,nickpack/django-oscar,bschuon/django-oscar,adamend/django-oscar,amirrpp/django-oscar,manevant/django-oscar,bschuon/django-oscar,jinnykoo/wuyisj,jlmadurga/django-oscar,taedori81/django-oscar,john-parton/django-oscar,solarissmoke/django-oscar,WadeYuChen/django-oscar,john-parton/django-oscar,faratro/django-oscar,MatthewWilkes/django-oscar,mexeniz/django-oscar,jlmadurga/django-oscar,jinnykoo/wuyisj.com,pdonadeo/django-oscar,sonofatailor/django-oscar,QLGu/django-oscar,anentropic/django-oscar,nfletton/django-oscar,manevant/django-oscar,jlmadurga/django-oscar,lijoantony/django-oscar,john-parton/django-oscar,solarissmoke/django-oscar,taedori81/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,itbabu/django-oscar,saadatqadri/django-oscar,okfish/django-oscar,nfletton/django-oscar,Jannes123/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,anentropic/django-oscar,jinnykoo/wuyisj,anentropic/django-oscar,kapari/django-oscar,sasha0/django-oscar,thechampanurag/django-oscar,amirrpp/django-oscar,bnprk/django-oscar,ademuk/django-oscar,kapari/django-oscar,dongguangming/django-oscar,Bogh/django-oscar,adamend/django-oscar,pdonadeo/django-oscar,makielab/django-oscar,kapt/django-
oscar,itbabu/django-oscar,jinnykoo/wuyisj.com,spartonia/django-oscar,mexeniz/django-oscar,makielab/django-oscar,jinnykoo/wuyisj,WillisXChen/django-oscar,jmt4/django-oscar,marcoantoniooliveira/labweb,WadeYuChen/django-oscar,bnprk/django-oscar,makielab/django-oscar,MatthewWilkes/django-oscar,taedori81/django-oscar,pdonadeo/django-oscar,binarydud/django-oscar,nickpack/django-oscar,marcoantoniooliveira/labweb,ademuk/django-oscar,Bogh/django-oscar,adamend/django-oscar,nickpack/django-oscar,rocopartners/django-oscar,michaelkuty/django-oscar,DrOctogon/unwash_ecom,john-parton/django-oscar,faratro/django-oscar,spartonia/django-oscar,sonofatailor/django-oscar,Bogh/django-oscar,vovanbo/django-oscar,monikasulik/django-oscar,ademuk/django-oscar,itbabu/django-oscar,solarissmoke/django-oscar,itbabu/django-oscar,WadeYuChen/django-oscar,mexeniz/django-oscar,WillisXChen/django-oscar,sasha0/django-oscar,vovanbo/django-oscar,DrOctogon/unwash_ecom,eddiep1101/django-oscar,Jannes123/django-oscar,faratro/django-oscar,QLGu/django-oscar,lijoantony/django-oscar,ademuk/django-oscar,machtfit/django-oscar,kapt/django-oscar,adamend/django-oscar,jmt4/django-oscar,Bogh/django-oscar,josesanch/django-oscar,sonofatailor/django-oscar,bnprk/django-oscar,okfish/django-oscar,jinnykoo/wuyisj.com,vovanbo/django-oscar,thechampanurag/django-oscar,josesanch/django-oscar,makielab/django-oscar,saadatqadri/django-oscar,nfletton/django-oscar,okfish/django-oscar,taedori81/django-oscar,binarydud/django-oscar,WillisXChen/django-oscar,manevant/django-oscar,Jannes123/django-oscar,binarydud/django-oscar,bschuon/django-oscar,django-oscar/django-oscar,jinnykoo/christmas,machtfit/django-oscar,jlmadurga/django-oscar,marcoantoniooliveira/labweb,lijoantony/django-oscar,sasha0/django-oscar,dongguangming/django-oscar,jinnykoo/wuyisj,DrOctogon/unwash_ecom,spartonia/django-oscar,MatthewWilkes/django-oscar,nickpack/django-oscar,ka7eh/django-oscar,mexeniz/django-oscar,thechampanurag/django-oscar,jmt4/django-oscar,elliotthill/django
-oscar,bnprk/django-oscar,pasqualguerrero/django-oscar,pasqualguerrero/django-oscar,WillisXChen/django-oscar,Jannes123/django-oscar,kapt/django-oscar,sonofatailor/django-oscar,jinnykoo/wuyisj.com,Idematica/django-oscar,sasha0/django-oscar,okfish/django-oscar,marcoantoniooliveira/labweb,thechampanurag/django-oscar,eddiep1101/django-oscar,anentropic/django-oscar,lijoantony/django-oscar,michaelkuty/django-oscar,elliotthill/django-oscar,Idematica/django-oscar,ahmetdaglarbas/e-commerce,monikasulik/django-oscar,MatthewWilkes/django-oscar,ahmetdaglarbas/e-commerce,django-oscar/django-oscar,manevant/django-oscar,dongguangming/django-oscar,monikasulik/django-oscar,machtfit/django-oscar,bschuon/django-oscar,elliotthill/django-oscar,eddiep1101/django-oscar,ahmetdaglarbas/e-commerce
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in forieng key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
Add two settings related to custom user model
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
|
<commit_before>from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in forieng key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
<commit_msg>Add two settings related to custom user model<commit_after>
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
|
from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in forieng key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
Add two settings related to custom user modelfrom django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
|
<commit_before>from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in forieng key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
<commit_msg>Add two settings related to custom user model<commit_after>from django.conf import settings
from django.contrib.auth.models import User
def get_user_model():
"""
Return the User model
Using this function instead of Django 1.5's get_user_model allows backwards
compatibility with Django 1.4.
"""
try:
# Django 1.5+
from django.contrib.auth import get_user_model
except ImportError:
# Django <= 1.4
model = User
else:
model = get_user_model()
# Test if user model has any custom fields and add attributes to the _meta
# class
core_fields = set([f.name for f in User._meta.fields])
model_fields = set([f.name for f in model._meta.fields])
new_fields = model_fields.difference(core_fields)
model._meta.has_additional_fields = len(new_fields) > 0
model._meta.additional_fields = new_fields
return model
# A setting that can be used in foreign key declarations
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
# Two additional settings that are useful in South migrations when
# specifying the user model in the FakeORM
AUTH_USER_APP_LABEL, AUTH_USER_MODEL_NAME = AUTH_USER_MODEL.split('.')
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.