commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
807e1315c2abb6c493eca575f478ce7b69173d6f
|
pajbot/eventloop.py
|
pajbot/eventloop.py
|
import logging
from irc.schedule import IScheduler
from tempora import schedule
from tempora.schedule import Scheduler
log = logging.getLogger(__name__)
# same as InvokeScheduler from the original implementation,
# but with the extra try-catch
class SafeInvokeScheduler(Scheduler):
"""
Command targets are functions to be invoked on schedule.
"""
def run(self, command):
try:
command.target()
except Exception:
# we do "exception Exception" to not catch KeyboardInterrupt and SystemExit
# (so the bot can properly quit)
log.exception("Logging an uncaught exception (main thread)")
# same as DefaultScheduler from the original implementation,
# but extends SafeInvokeScheduler instead
class SafeDefaultScheduler(SafeInvokeScheduler, IScheduler):
def execute_every(self, period, func):
self.add(schedule.PeriodicCommand.after(period, func))
def execute_at(self, when, func):
self.add(schedule.DelayedCommand.at_time(when, func))
def execute_after(self, delay, func):
self.add(schedule.DelayedCommand.after(delay, func))
|
import logging
from irc.schedule import IScheduler
from tempora import schedule
from tempora.schedule import Scheduler
log = logging.getLogger(__name__)
# same as InvokeScheduler from the original implementation,
# but with the extra try-catch
class SafeInvokeScheduler(Scheduler):
"""
Command targets are functions to be invoked on schedule.
"""
def run(self, command):
try:
command.target()
except Exception:
# we do "except Exception" to not catch KeyboardInterrupt and SystemExit (so the bot can properly quit)
log.exception("Logging an uncaught exception (main thread)")
# same as DefaultScheduler from the original implementation,
# but extends SafeInvokeScheduler instead
class SafeDefaultScheduler(SafeInvokeScheduler, IScheduler):
def execute_every(self, period, func):
self.add(schedule.PeriodicCommand.after(period, func))
def execute_at(self, when, func):
self.add(schedule.DelayedCommand.at_time(when, func))
def execute_after(self, delay, func):
self.add(schedule.DelayedCommand.after(delay, func))
|
Update comment to be a bit more helpful
|
Update comment to be a bit more helpful
|
Python
|
mit
|
pajlada/tyggbot,pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot,pajlada/pajbot,pajlada/tyggbot,pajlada/tyggbot,pajlada/pajbot
|
import logging
from irc.schedule import IScheduler
from tempora import schedule
from tempora.schedule import Scheduler
log = logging.getLogger(__name__)
# same as InvokeScheduler from the original implementation,
# but with the extra try-catch
class SafeInvokeScheduler(Scheduler):
"""
Command targets are functions to be invoked on schedule.
"""
def run(self, command):
try:
command.target()
except Exception:
# we do "exception Exception" to not catch KeyboardInterrupt and SystemExit
# (so the bot can properly quit)
log.exception("Logging an uncaught exception (main thread)")
# same as DefaultScheduler from the original implementation,
# but extends SafeInvokeScheduler instead
class SafeDefaultScheduler(SafeInvokeScheduler, IScheduler):
def execute_every(self, period, func):
self.add(schedule.PeriodicCommand.after(period, func))
def execute_at(self, when, func):
self.add(schedule.DelayedCommand.at_time(when, func))
def execute_after(self, delay, func):
self.add(schedule.DelayedCommand.after(delay, func))
Update comment to be a bit more helpful
|
import logging
from irc.schedule import IScheduler
from tempora import schedule
from tempora.schedule import Scheduler
log = logging.getLogger(__name__)
# same as InvokeScheduler from the original implementation,
# but with the extra try-catch
class SafeInvokeScheduler(Scheduler):
"""
Command targets are functions to be invoked on schedule.
"""
def run(self, command):
try:
command.target()
except Exception:
# we do "except Exception" to not catch KeyboardInterrupt and SystemExit (so the bot can properly quit)
log.exception("Logging an uncaught exception (main thread)")
# same as DefaultScheduler from the original implementation,
# but extends SafeInvokeScheduler instead
class SafeDefaultScheduler(SafeInvokeScheduler, IScheduler):
def execute_every(self, period, func):
self.add(schedule.PeriodicCommand.after(period, func))
def execute_at(self, when, func):
self.add(schedule.DelayedCommand.at_time(when, func))
def execute_after(self, delay, func):
self.add(schedule.DelayedCommand.after(delay, func))
|
<commit_before>import logging
from irc.schedule import IScheduler
from tempora import schedule
from tempora.schedule import Scheduler
log = logging.getLogger(__name__)
# same as InvokeScheduler from the original implementation,
# but with the extra try-catch
class SafeInvokeScheduler(Scheduler):
"""
Command targets are functions to be invoked on schedule.
"""
def run(self, command):
try:
command.target()
except Exception:
# we do "exception Exception" to not catch KeyboardInterrupt and SystemExit
# (so the bot can properly quit)
log.exception("Logging an uncaught exception (main thread)")
# same as DefaultScheduler from the original implementation,
# but extends SafeInvokeScheduler instead
class SafeDefaultScheduler(SafeInvokeScheduler, IScheduler):
def execute_every(self, period, func):
self.add(schedule.PeriodicCommand.after(period, func))
def execute_at(self, when, func):
self.add(schedule.DelayedCommand.at_time(when, func))
def execute_after(self, delay, func):
self.add(schedule.DelayedCommand.after(delay, func))
<commit_msg>Update comment to be a bit more helpful<commit_after>
|
import logging
from irc.schedule import IScheduler
from tempora import schedule
from tempora.schedule import Scheduler
log = logging.getLogger(__name__)
# same as InvokeScheduler from the original implementation,
# but with the extra try-catch
class SafeInvokeScheduler(Scheduler):
"""
Command targets are functions to be invoked on schedule.
"""
def run(self, command):
try:
command.target()
except Exception:
# we do "except Exception" to not catch KeyboardInterrupt and SystemExit (so the bot can properly quit)
log.exception("Logging an uncaught exception (main thread)")
# same as DefaultScheduler from the original implementation,
# but extends SafeInvokeScheduler instead
class SafeDefaultScheduler(SafeInvokeScheduler, IScheduler):
def execute_every(self, period, func):
self.add(schedule.PeriodicCommand.after(period, func))
def execute_at(self, when, func):
self.add(schedule.DelayedCommand.at_time(when, func))
def execute_after(self, delay, func):
self.add(schedule.DelayedCommand.after(delay, func))
|
import logging
from irc.schedule import IScheduler
from tempora import schedule
from tempora.schedule import Scheduler
log = logging.getLogger(__name__)
# same as InvokeScheduler from the original implementation,
# but with the extra try-catch
class SafeInvokeScheduler(Scheduler):
"""
Command targets are functions to be invoked on schedule.
"""
def run(self, command):
try:
command.target()
except Exception:
# we do "exception Exception" to not catch KeyboardInterrupt and SystemExit
# (so the bot can properly quit)
log.exception("Logging an uncaught exception (main thread)")
# same as DefaultScheduler from the original implementation,
# but extends SafeInvokeScheduler instead
class SafeDefaultScheduler(SafeInvokeScheduler, IScheduler):
def execute_every(self, period, func):
self.add(schedule.PeriodicCommand.after(period, func))
def execute_at(self, when, func):
self.add(schedule.DelayedCommand.at_time(when, func))
def execute_after(self, delay, func):
self.add(schedule.DelayedCommand.after(delay, func))
Update comment to be a bit more helpfulimport logging
from irc.schedule import IScheduler
from tempora import schedule
from tempora.schedule import Scheduler
log = logging.getLogger(__name__)
# same as InvokeScheduler from the original implementation,
# but with the extra try-catch
class SafeInvokeScheduler(Scheduler):
"""
Command targets are functions to be invoked on schedule.
"""
def run(self, command):
try:
command.target()
except Exception:
# we do "except Exception" to not catch KeyboardInterrupt and SystemExit (so the bot can properly quit)
log.exception("Logging an uncaught exception (main thread)")
# same as DefaultScheduler from the original implementation,
# but extends SafeInvokeScheduler instead
class SafeDefaultScheduler(SafeInvokeScheduler, IScheduler):
def execute_every(self, period, func):
self.add(schedule.PeriodicCommand.after(period, func))
def execute_at(self, when, func):
self.add(schedule.DelayedCommand.at_time(when, func))
def execute_after(self, delay, func):
self.add(schedule.DelayedCommand.after(delay, func))
|
<commit_before>import logging
from irc.schedule import IScheduler
from tempora import schedule
from tempora.schedule import Scheduler
log = logging.getLogger(__name__)
# same as InvokeScheduler from the original implementation,
# but with the extra try-catch
class SafeInvokeScheduler(Scheduler):
"""
Command targets are functions to be invoked on schedule.
"""
def run(self, command):
try:
command.target()
except Exception:
# we do "exception Exception" to not catch KeyboardInterrupt and SystemExit
# (so the bot can properly quit)
log.exception("Logging an uncaught exception (main thread)")
# same as DefaultScheduler from the original implementation,
# but extends SafeInvokeScheduler instead
class SafeDefaultScheduler(SafeInvokeScheduler, IScheduler):
def execute_every(self, period, func):
self.add(schedule.PeriodicCommand.after(period, func))
def execute_at(self, when, func):
self.add(schedule.DelayedCommand.at_time(when, func))
def execute_after(self, delay, func):
self.add(schedule.DelayedCommand.after(delay, func))
<commit_msg>Update comment to be a bit more helpful<commit_after>import logging
from irc.schedule import IScheduler
from tempora import schedule
from tempora.schedule import Scheduler
log = logging.getLogger(__name__)
# same as InvokeScheduler from the original implementation,
# but with the extra try-catch
class SafeInvokeScheduler(Scheduler):
"""
Command targets are functions to be invoked on schedule.
"""
def run(self, command):
try:
command.target()
except Exception:
# we do "except Exception" to not catch KeyboardInterrupt and SystemExit (so the bot can properly quit)
log.exception("Logging an uncaught exception (main thread)")
# same as DefaultScheduler from the original implementation,
# but extends SafeInvokeScheduler instead
class SafeDefaultScheduler(SafeInvokeScheduler, IScheduler):
def execute_every(self, period, func):
self.add(schedule.PeriodicCommand.after(period, func))
def execute_at(self, when, func):
self.add(schedule.DelayedCommand.at_time(when, func))
def execute_after(self, delay, func):
self.add(schedule.DelayedCommand.after(delay, func))
|
530a26c4a857ee35d841ef1d716021fb2c91524a
|
notesapi/v1/migrations/0001_initial.py
|
notesapi/v1/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
""" Initial migration file for creating Note model """
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
""" Initial migration file for creating Note model """
dependencies = [
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user_id', models.CharField(
help_text=b'Anonymized user id, not course specific', max_length=255, db_index=True
)),
('course_id', models.CharField(max_length=255, db_index=True)),
('usage_id', models.CharField(help_text=b'ID of XBlock where the text comes from', max_length=255)),
('quote', models.TextField(default=b'')),
('text', models.TextField(default=b'', help_text=b"Student's thoughts on the quote", blank=True)),
('ranges', models.TextField(help_text=b'JSON, describes position of quote in the source text')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
},
bases=(models.Model,),
),
]
|
# -*- coding: utf-8 -*-
""" Initial migration file for creating Note model """
from __future__ import unicode_literals
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
""" Initial migration file for creating Note model """
dependencies = [
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user_id', models.CharField(
help_text=b'Anonymized user id, not course specific', max_length=255, db_index=True
)),
('course_id', models.CharField(max_length=255, db_index=True)),
('usage_id', models.CharField(help_text=b'ID of XBlock where the text comes from', max_length=255)),
('quote', models.TextField(default=b'')),
('text', models.TextField(default=b'', help_text=b"Student's thoughts on the quote", blank=True)),
('ranges', models.TextField(help_text=b'JSON, describes position of quote in the source text')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
},
bases=(models.Model,),
),
]
|
Add auth models and validators to initial db migration.
|
Add auth models and validators to initial db migration.
|
Python
|
agpl-3.0
|
edx/edx-notes-api,edx/edx-notes-api
|
# -*- coding: utf-8 -*-
""" Initial migration file for creating Note model """
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
""" Initial migration file for creating Note model """
dependencies = [
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user_id', models.CharField(
help_text=b'Anonymized user id, not course specific', max_length=255, db_index=True
)),
('course_id', models.CharField(max_length=255, db_index=True)),
('usage_id', models.CharField(help_text=b'ID of XBlock where the text comes from', max_length=255)),
('quote', models.TextField(default=b'')),
('text', models.TextField(default=b'', help_text=b"Student's thoughts on the quote", blank=True)),
('ranges', models.TextField(help_text=b'JSON, describes position of quote in the source text')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
},
bases=(models.Model,),
),
]
Add auth models and validators to initial db migration.
|
# -*- coding: utf-8 -*-
""" Initial migration file for creating Note model """
from __future__ import unicode_literals
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
""" Initial migration file for creating Note model """
dependencies = [
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user_id', models.CharField(
help_text=b'Anonymized user id, not course specific', max_length=255, db_index=True
)),
('course_id', models.CharField(max_length=255, db_index=True)),
('usage_id', models.CharField(help_text=b'ID of XBlock where the text comes from', max_length=255)),
('quote', models.TextField(default=b'')),
('text', models.TextField(default=b'', help_text=b"Student's thoughts on the quote", blank=True)),
('ranges', models.TextField(help_text=b'JSON, describes position of quote in the source text')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
},
bases=(models.Model,),
),
]
|
<commit_before># -*- coding: utf-8 -*-
""" Initial migration file for creating Note model """
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
""" Initial migration file for creating Note model """
dependencies = [
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user_id', models.CharField(
help_text=b'Anonymized user id, not course specific', max_length=255, db_index=True
)),
('course_id', models.CharField(max_length=255, db_index=True)),
('usage_id', models.CharField(help_text=b'ID of XBlock where the text comes from', max_length=255)),
('quote', models.TextField(default=b'')),
('text', models.TextField(default=b'', help_text=b"Student's thoughts on the quote", blank=True)),
('ranges', models.TextField(help_text=b'JSON, describes position of quote in the source text')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
},
bases=(models.Model,),
),
]
<commit_msg>Add auth models and validators to initial db migration.<commit_after>
|
# -*- coding: utf-8 -*-
""" Initial migration file for creating Note model """
from __future__ import unicode_literals
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
""" Initial migration file for creating Note model """
dependencies = [
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user_id', models.CharField(
help_text=b'Anonymized user id, not course specific', max_length=255, db_index=True
)),
('course_id', models.CharField(max_length=255, db_index=True)),
('usage_id', models.CharField(help_text=b'ID of XBlock where the text comes from', max_length=255)),
('quote', models.TextField(default=b'')),
('text', models.TextField(default=b'', help_text=b"Student's thoughts on the quote", blank=True)),
('ranges', models.TextField(help_text=b'JSON, describes position of quote in the source text')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
},
bases=(models.Model,),
),
]
|
# -*- coding: utf-8 -*-
""" Initial migration file for creating Note model """
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
""" Initial migration file for creating Note model """
dependencies = [
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user_id', models.CharField(
help_text=b'Anonymized user id, not course specific', max_length=255, db_index=True
)),
('course_id', models.CharField(max_length=255, db_index=True)),
('usage_id', models.CharField(help_text=b'ID of XBlock where the text comes from', max_length=255)),
('quote', models.TextField(default=b'')),
('text', models.TextField(default=b'', help_text=b"Student's thoughts on the quote", blank=True)),
('ranges', models.TextField(help_text=b'JSON, describes position of quote in the source text')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
},
bases=(models.Model,),
),
]
Add auth models and validators to initial db migration.# -*- coding: utf-8 -*-
""" Initial migration file for creating Note model """
from __future__ import unicode_literals
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
""" Initial migration file for creating Note model """
dependencies = [
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user_id', models.CharField(
help_text=b'Anonymized user id, not course specific', max_length=255, db_index=True
)),
('course_id', models.CharField(max_length=255, db_index=True)),
('usage_id', models.CharField(help_text=b'ID of XBlock where the text comes from', max_length=255)),
('quote', models.TextField(default=b'')),
('text', models.TextField(default=b'', help_text=b"Student's thoughts on the quote", blank=True)),
('ranges', models.TextField(help_text=b'JSON, describes position of quote in the source text')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
},
bases=(models.Model,),
),
]
|
<commit_before># -*- coding: utf-8 -*-
""" Initial migration file for creating Note model """
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
""" Initial migration file for creating Note model """
dependencies = [
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user_id', models.CharField(
help_text=b'Anonymized user id, not course specific', max_length=255, db_index=True
)),
('course_id', models.CharField(max_length=255, db_index=True)),
('usage_id', models.CharField(help_text=b'ID of XBlock where the text comes from', max_length=255)),
('quote', models.TextField(default=b'')),
('text', models.TextField(default=b'', help_text=b"Student's thoughts on the quote", blank=True)),
('ranges', models.TextField(help_text=b'JSON, describes position of quote in the source text')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
},
bases=(models.Model,),
),
]
<commit_msg>Add auth models and validators to initial db migration.<commit_after># -*- coding: utf-8 -*-
""" Initial migration file for creating Note model """
from __future__ import unicode_literals
import django.contrib.auth.models
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
""" Initial migration file for creating Note model """
dependencies = [
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user_id', models.CharField(
help_text=b'Anonymized user id, not course specific', max_length=255, db_index=True
)),
('course_id', models.CharField(max_length=255, db_index=True)),
('usage_id', models.CharField(help_text=b'ID of XBlock where the text comes from', max_length=255)),
('quote', models.TextField(default=b'')),
('text', models.TextField(default=b'', help_text=b"Student's thoughts on the quote", blank=True)),
('ranges', models.TextField(help_text=b'JSON, describes position of quote in the source text')),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
],
options={
},
bases=(models.Model,),
),
]
|
71166b445eb5b4aec407b743f8167842e21ed28f
|
dataedit/templatetags/dataedit/taghandler.py
|
dataedit/templatetags/dataedit/taghandler.py
|
from django import template
from dataedit import models
import webcolors
register = template.Library()
@register.assignment_tag
def get_tags():
return models.Tag.objects.all()[:10]
@register.simple_tag()
def readable_text_color(color_hex):
r,g,b = webcolors.hex_to_rgb(color_hex)
L = 0.2126 * r + 0.7152 * g+ 0.0722 * b
print((r,g,b), L, 0.279*255)
if L < 0.279*255:
return "#FFFFFF"
else:
return "#000000"
|
from django import template
from dataedit import models
import webcolors
register = template.Library()
@register.assignment_tag
def get_tags():
return models.Tag.objects.all()[:10]
@register.simple_tag()
def readable_text_color(color_hex):
r, g, b = webcolors.hex_to_rgb(color_hex)
# Calculate brightness of the background and compare to threshold
if 0.2126 * r + 0.7152 * g+ 0.0722 * b < 0.279*255:
return "#FFFFFF"
else:
return "#000000"
|
Remove unnecessary variable assignment and print
|
Remove unnecessary variable assignment and print
|
Python
|
agpl-3.0
|
openego/oeplatform,tom-heimbrodt/oeplatform,tom-heimbrodt/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform,tom-heimbrodt/oeplatform
|
from django import template
from dataedit import models
import webcolors
register = template.Library()
@register.assignment_tag
def get_tags():
return models.Tag.objects.all()[:10]
@register.simple_tag()
def readable_text_color(color_hex):
r,g,b = webcolors.hex_to_rgb(color_hex)
L = 0.2126 * r + 0.7152 * g+ 0.0722 * b
print((r,g,b), L, 0.279*255)
if L < 0.279*255:
return "#FFFFFF"
else:
return "#000000"Remove unnecessary variable assignment and print
|
from django import template
from dataedit import models
import webcolors
register = template.Library()
@register.assignment_tag
def get_tags():
return models.Tag.objects.all()[:10]
@register.simple_tag()
def readable_text_color(color_hex):
r, g, b = webcolors.hex_to_rgb(color_hex)
# Calculate brightness of the background and compare to threshold
if 0.2126 * r + 0.7152 * g+ 0.0722 * b < 0.279*255:
return "#FFFFFF"
else:
return "#000000"
|
<commit_before>from django import template
from dataedit import models
import webcolors
register = template.Library()
@register.assignment_tag
def get_tags():
return models.Tag.objects.all()[:10]
@register.simple_tag()
def readable_text_color(color_hex):
r,g,b = webcolors.hex_to_rgb(color_hex)
L = 0.2126 * r + 0.7152 * g+ 0.0722 * b
print((r,g,b), L, 0.279*255)
if L < 0.279*255:
return "#FFFFFF"
else:
return "#000000"<commit_msg>Remove unnecessary variable assignment and print<commit_after>
|
from django import template
from dataedit import models
import webcolors
register = template.Library()
@register.assignment_tag
def get_tags():
return models.Tag.objects.all()[:10]
@register.simple_tag()
def readable_text_color(color_hex):
r, g, b = webcolors.hex_to_rgb(color_hex)
# Calculate brightness of the background and compare to threshold
if 0.2126 * r + 0.7152 * g+ 0.0722 * b < 0.279*255:
return "#FFFFFF"
else:
return "#000000"
|
from django import template
from dataedit import models
import webcolors
register = template.Library()
@register.assignment_tag
def get_tags():
return models.Tag.objects.all()[:10]
@register.simple_tag()
def readable_text_color(color_hex):
r,g,b = webcolors.hex_to_rgb(color_hex)
L = 0.2126 * r + 0.7152 * g+ 0.0722 * b
print((r,g,b), L, 0.279*255)
if L < 0.279*255:
return "#FFFFFF"
else:
return "#000000"Remove unnecessary variable assignment and printfrom django import template
from dataedit import models
import webcolors
register = template.Library()
@register.assignment_tag
def get_tags():
return models.Tag.objects.all()[:10]
@register.simple_tag()
def readable_text_color(color_hex):
r, g, b = webcolors.hex_to_rgb(color_hex)
# Calculate brightness of the background and compare to threshold
if 0.2126 * r + 0.7152 * g+ 0.0722 * b < 0.279*255:
return "#FFFFFF"
else:
return "#000000"
|
<commit_before>from django import template
from dataedit import models
import webcolors
register = template.Library()
@register.assignment_tag
def get_tags():
return models.Tag.objects.all()[:10]
@register.simple_tag()
def readable_text_color(color_hex):
r,g,b = webcolors.hex_to_rgb(color_hex)
L = 0.2126 * r + 0.7152 * g+ 0.0722 * b
print((r,g,b), L, 0.279*255)
if L < 0.279*255:
return "#FFFFFF"
else:
return "#000000"<commit_msg>Remove unnecessary variable assignment and print<commit_after>from django import template
from dataedit import models
import webcolors
register = template.Library()
@register.assignment_tag
def get_tags():
return models.Tag.objects.all()[:10]
@register.simple_tag()
def readable_text_color(color_hex):
r, g, b = webcolors.hex_to_rgb(color_hex)
# Calculate brightness of the background and compare to threshold
if 0.2126 * r + 0.7152 * g+ 0.0722 * b < 0.279*255:
return "#FFFFFF"
else:
return "#000000"
|
42d77d028ae19cee3ce9890ca757d1cb95678547
|
testsettings.py
|
testsettings.py
|
DEBUG = True
SECRET_KEY = 'ItsSekret'
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'subscriptionstore.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Third-party apps
'south',
'tastypie',
# Us
'subscription'
]
VUMI_GO_ACCOUNT_KEY = "replaceme"
VUMI_GO_CONVERSATION_KEY = "replaceme"
VUMI_GO_ACCOUNT_TOKEN = "replaceme"
ROOT_URLCONF = 'subscription.urls'
|
DEBUG = True
SECRET_KEY = 'ItsSekret'
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'subscriptionstore.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Third-party apps
'south',
'tastypie',
'djcelery',
# Us
'subscription'
]
VUMI_GO_ACCOUNT_KEY = "replaceme"
VUMI_GO_CONVERSATION_KEY = "replaceme"
VUMI_GO_ACCOUNT_TOKEN = "replaceme"
ROOT_URLCONF = 'subscription.urls'
|
Update to include djcelery in apps for prereq migrations to run
|
Update to include djcelery in apps for prereq migrations to run
|
Python
|
bsd-3-clause
|
westerncapelabs/django-messaging-subscription,westerncapelabs/django-messaging-subscription
|
DEBUG = True
SECRET_KEY = 'ItsSekret'
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'subscriptionstore.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Third-party apps
'south',
'tastypie',
# Us
'subscription'
]
VUMI_GO_ACCOUNT_KEY = "replaceme"
VUMI_GO_CONVERSATION_KEY = "replaceme"
VUMI_GO_ACCOUNT_TOKEN = "replaceme"
ROOT_URLCONF = 'subscription.urls'Update to include djcelery in apps for prereq migrations to run
|
DEBUG = True
SECRET_KEY = 'ItsSekret'
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'subscriptionstore.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Third-party apps
'south',
'tastypie',
'djcelery',
# Us
'subscription'
]
VUMI_GO_ACCOUNT_KEY = "replaceme"
VUMI_GO_CONVERSATION_KEY = "replaceme"
VUMI_GO_ACCOUNT_TOKEN = "replaceme"
ROOT_URLCONF = 'subscription.urls'
|
<commit_before>DEBUG = True
SECRET_KEY = 'ItsSekret'
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'subscriptionstore.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Third-party apps
'south',
'tastypie',
# Us
'subscription'
]
VUMI_GO_ACCOUNT_KEY = "replaceme"
VUMI_GO_CONVERSATION_KEY = "replaceme"
VUMI_GO_ACCOUNT_TOKEN = "replaceme"
ROOT_URLCONF = 'subscription.urls'<commit_msg>Update to include djcelery in apps for prereq migrations to run<commit_after>
|
DEBUG = True
SECRET_KEY = 'ItsSekret'
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'subscriptionstore.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Third-party apps
'south',
'tastypie',
'djcelery',
# Us
'subscription'
]
VUMI_GO_ACCOUNT_KEY = "replaceme"
VUMI_GO_CONVERSATION_KEY = "replaceme"
VUMI_GO_ACCOUNT_TOKEN = "replaceme"
ROOT_URLCONF = 'subscription.urls'
|
DEBUG = True
SECRET_KEY = 'ItsSekret'
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'subscriptionstore.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Third-party apps
'south',
'tastypie',
# Us
'subscription'
]
VUMI_GO_ACCOUNT_KEY = "replaceme"
VUMI_GO_CONVERSATION_KEY = "replaceme"
VUMI_GO_ACCOUNT_TOKEN = "replaceme"
ROOT_URLCONF = 'subscription.urls'Update to include djcelery in apps for prereq migrations to runDEBUG = True
SECRET_KEY = 'ItsSekret'
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'subscriptionstore.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Third-party apps
'south',
'tastypie',
'djcelery',
# Us
'subscription'
]
VUMI_GO_ACCOUNT_KEY = "replaceme"
VUMI_GO_CONVERSATION_KEY = "replaceme"
VUMI_GO_ACCOUNT_TOKEN = "replaceme"
ROOT_URLCONF = 'subscription.urls'
|
<commit_before>DEBUG = True
SECRET_KEY = 'ItsSekret'
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'subscriptionstore.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Third-party apps
'south',
'tastypie',
# Us
'subscription'
]
VUMI_GO_ACCOUNT_KEY = "replaceme"
VUMI_GO_CONVERSATION_KEY = "replaceme"
VUMI_GO_ACCOUNT_TOKEN = "replaceme"
ROOT_URLCONF = 'subscription.urls'<commit_msg>Update to include djcelery in apps for prereq migrations to run<commit_after>DEBUG = True
SECRET_KEY = 'ItsSekret'
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'subscriptionstore.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
# Third-party apps
'south',
'tastypie',
'djcelery',
# Us
'subscription'
]
VUMI_GO_ACCOUNT_KEY = "replaceme"
VUMI_GO_CONVERSATION_KEY = "replaceme"
VUMI_GO_ACCOUNT_TOKEN = "replaceme"
ROOT_URLCONF = 'subscription.urls'
|
93dac2902ff11e8198c2c58f48b2aa15f2c01f6e
|
apps/submission/views.py
|
apps/submission/views.py
|
from pathlib import PurePath
from tempfile import mkdtemp
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from .io.xlsx import generate_template
class DownloadXLSXTemplateView(LoginRequiredMixin, TemplateView):
template_name = 'submission/download_xlsx_template.html'
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx.update({
'step': 'download',
})
return ctx
class GenerateXLSXTemplateView(LoginRequiredMixin, View):
def post(self, request, *args, **kwargs):
template_file_name = 'pixel_template.xlsx'
template_path = PurePath(mkdtemp(), template_file_name)
generate_template(template_path)
response = HttpResponse(
content_type=(
'application/vnd'
'.openxmlformats-officedocument'
'.spreadsheetml'
'.sheet'
)
)
content_disposition = 'attachment; filename="{}"'.format(
template_file_name
)
response['Content-Disposition'] = content_disposition
with open(template_path, 'rb') as template:
response.write(template.read())
return response
|
from pathlib import PurePath
from tempfile import mkdtemp
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from .io.xlsx import generate_template
class DownloadXLSXTemplateView(LoginRequiredMixin, TemplateView):
template_name = 'submission/download_xlsx_template.html'
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx.update({
'step': 'download',
})
return ctx
class GenerateXLSXTemplateView(LoginRequiredMixin, View):
def post(self, request, *args, **kwargs):
template_file_name = 'meta.xlsx'
template_path = PurePath(mkdtemp(), template_file_name)
generate_template(template_path)
response = HttpResponse(
content_type=(
'application/vnd'
'.openxmlformats-officedocument'
'.spreadsheetml'
'.sheet'
)
)
content_disposition = 'attachment; filename="{}"'.format(
template_file_name
)
response['Content-Disposition'] = content_disposition
with open(template_path, 'rb') as template:
response.write(template.read())
return response
|
Update pixel's template file name to meta.xlsx
|
Update pixel's template file name to meta.xlsx
|
Python
|
bsd-3-clause
|
Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel,Candihub/pixel
|
from pathlib import PurePath
from tempfile import mkdtemp
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from .io.xlsx import generate_template
class DownloadXLSXTemplateView(LoginRequiredMixin, TemplateView):
template_name = 'submission/download_xlsx_template.html'
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx.update({
'step': 'download',
})
return ctx
class GenerateXLSXTemplateView(LoginRequiredMixin, View):
def post(self, request, *args, **kwargs):
template_file_name = 'pixel_template.xlsx'
template_path = PurePath(mkdtemp(), template_file_name)
generate_template(template_path)
response = HttpResponse(
content_type=(
'application/vnd'
'.openxmlformats-officedocument'
'.spreadsheetml'
'.sheet'
)
)
content_disposition = 'attachment; filename="{}"'.format(
template_file_name
)
response['Content-Disposition'] = content_disposition
with open(template_path, 'rb') as template:
response.write(template.read())
return response
Update pixel's template file name to meta.xlsx
|
from pathlib import PurePath
from tempfile import mkdtemp
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from .io.xlsx import generate_template
class DownloadXLSXTemplateView(LoginRequiredMixin, TemplateView):
template_name = 'submission/download_xlsx_template.html'
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx.update({
'step': 'download',
})
return ctx
class GenerateXLSXTemplateView(LoginRequiredMixin, View):
def post(self, request, *args, **kwargs):
template_file_name = 'meta.xlsx'
template_path = PurePath(mkdtemp(), template_file_name)
generate_template(template_path)
response = HttpResponse(
content_type=(
'application/vnd'
'.openxmlformats-officedocument'
'.spreadsheetml'
'.sheet'
)
)
content_disposition = 'attachment; filename="{}"'.format(
template_file_name
)
response['Content-Disposition'] = content_disposition
with open(template_path, 'rb') as template:
response.write(template.read())
return response
|
<commit_before>from pathlib import PurePath
from tempfile import mkdtemp
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from .io.xlsx import generate_template
class DownloadXLSXTemplateView(LoginRequiredMixin, TemplateView):
template_name = 'submission/download_xlsx_template.html'
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx.update({
'step': 'download',
})
return ctx
class GenerateXLSXTemplateView(LoginRequiredMixin, View):
def post(self, request, *args, **kwargs):
template_file_name = 'pixel_template.xlsx'
template_path = PurePath(mkdtemp(), template_file_name)
generate_template(template_path)
response = HttpResponse(
content_type=(
'application/vnd'
'.openxmlformats-officedocument'
'.spreadsheetml'
'.sheet'
)
)
content_disposition = 'attachment; filename="{}"'.format(
template_file_name
)
response['Content-Disposition'] = content_disposition
with open(template_path, 'rb') as template:
response.write(template.read())
return response
<commit_msg>Update pixel's template file name to meta.xlsx<commit_after>
|
from pathlib import PurePath
from tempfile import mkdtemp
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from .io.xlsx import generate_template
class DownloadXLSXTemplateView(LoginRequiredMixin, TemplateView):
template_name = 'submission/download_xlsx_template.html'
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx.update({
'step': 'download',
})
return ctx
class GenerateXLSXTemplateView(LoginRequiredMixin, View):
def post(self, request, *args, **kwargs):
template_file_name = 'meta.xlsx'
template_path = PurePath(mkdtemp(), template_file_name)
generate_template(template_path)
response = HttpResponse(
content_type=(
'application/vnd'
'.openxmlformats-officedocument'
'.spreadsheetml'
'.sheet'
)
)
content_disposition = 'attachment; filename="{}"'.format(
template_file_name
)
response['Content-Disposition'] = content_disposition
with open(template_path, 'rb') as template:
response.write(template.read())
return response
|
from pathlib import PurePath
from tempfile import mkdtemp
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from .io.xlsx import generate_template
class DownloadXLSXTemplateView(LoginRequiredMixin, TemplateView):
template_name = 'submission/download_xlsx_template.html'
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx.update({
'step': 'download',
})
return ctx
class GenerateXLSXTemplateView(LoginRequiredMixin, View):
def post(self, request, *args, **kwargs):
template_file_name = 'pixel_template.xlsx'
template_path = PurePath(mkdtemp(), template_file_name)
generate_template(template_path)
response = HttpResponse(
content_type=(
'application/vnd'
'.openxmlformats-officedocument'
'.spreadsheetml'
'.sheet'
)
)
content_disposition = 'attachment; filename="{}"'.format(
template_file_name
)
response['Content-Disposition'] = content_disposition
with open(template_path, 'rb') as template:
response.write(template.read())
return response
Update pixel's template file name to meta.xlsxfrom pathlib import PurePath
from tempfile import mkdtemp
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from .io.xlsx import generate_template
class DownloadXLSXTemplateView(LoginRequiredMixin, TemplateView):
template_name = 'submission/download_xlsx_template.html'
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx.update({
'step': 'download',
})
return ctx
class GenerateXLSXTemplateView(LoginRequiredMixin, View):
def post(self, request, *args, **kwargs):
template_file_name = 'meta.xlsx'
template_path = PurePath(mkdtemp(), template_file_name)
generate_template(template_path)
response = HttpResponse(
content_type=(
'application/vnd'
'.openxmlformats-officedocument'
'.spreadsheetml'
'.sheet'
)
)
content_disposition = 'attachment; filename="{}"'.format(
template_file_name
)
response['Content-Disposition'] = content_disposition
with open(template_path, 'rb') as template:
response.write(template.read())
return response
|
<commit_before>from pathlib import PurePath
from tempfile import mkdtemp
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from .io.xlsx import generate_template
class DownloadXLSXTemplateView(LoginRequiredMixin, TemplateView):
template_name = 'submission/download_xlsx_template.html'
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx.update({
'step': 'download',
})
return ctx
class GenerateXLSXTemplateView(LoginRequiredMixin, View):
def post(self, request, *args, **kwargs):
template_file_name = 'pixel_template.xlsx'
template_path = PurePath(mkdtemp(), template_file_name)
generate_template(template_path)
response = HttpResponse(
content_type=(
'application/vnd'
'.openxmlformats-officedocument'
'.spreadsheetml'
'.sheet'
)
)
content_disposition = 'attachment; filename="{}"'.format(
template_file_name
)
response['Content-Disposition'] = content_disposition
with open(template_path, 'rb') as template:
response.write(template.read())
return response
<commit_msg>Update pixel's template file name to meta.xlsx<commit_after>from pathlib import PurePath
from tempfile import mkdtemp
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from django.views.generic import TemplateView, View
from .io.xlsx import generate_template
class DownloadXLSXTemplateView(LoginRequiredMixin, TemplateView):
template_name = 'submission/download_xlsx_template.html'
def get_context_data(self, **kwargs):
ctx = super().get_context_data(**kwargs)
ctx.update({
'step': 'download',
})
return ctx
class GenerateXLSXTemplateView(LoginRequiredMixin, View):
def post(self, request, *args, **kwargs):
template_file_name = 'meta.xlsx'
template_path = PurePath(mkdtemp(), template_file_name)
generate_template(template_path)
response = HttpResponse(
content_type=(
'application/vnd'
'.openxmlformats-officedocument'
'.spreadsheetml'
'.sheet'
)
)
content_disposition = 'attachment; filename="{}"'.format(
template_file_name
)
response['Content-Disposition'] = content_disposition
with open(template_path, 'rb') as template:
response.write(template.read())
return response
|
64fdfe1dc072c3684255f8ecf1895d6350f979b6
|
nova/policies/image_size.py
|
nova/policies/image_size.py
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:image-size'
image_size_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return image_size_policies
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:image-size'
image_size_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"""Add 'OS-EXT-IMG-SIZE:size' attribute in the image response.""",
[
{
'method': 'GET',
'path': '/images/{id}'
},
{
'method': 'GET',
'path': '/images/detail'
}
]),
]
def list_rules():
return image_size_policies
|
Add policy description for image size
|
Add policy description for image size
This commit adds policy doc for image size policies.
Partial implement blueprint policy-docs
Change-Id: I0de4aaa47e21c4e156569eebcb495412ab364417
|
Python
|
apache-2.0
|
gooddata/openstack-nova,jianghuaw/nova,Juniper/nova,mahak/nova,rahulunair/nova,gooddata/openstack-nova,openstack/nova,mikalstill/nova,openstack/nova,rajalokan/nova,vmturbo/nova,Juniper/nova,rajalokan/nova,mahak/nova,jianghuaw/nova,mikalstill/nova,phenoxim/nova,gooddata/openstack-nova,jianghuaw/nova,Juniper/nova,klmitch/nova,klmitch/nova,rajalokan/nova,klmitch/nova,rahulunair/nova,vmturbo/nova,phenoxim/nova,klmitch/nova,gooddata/openstack-nova,vmturbo/nova,openstack/nova,vmturbo/nova,rajalokan/nova,Juniper/nova,rahulunair/nova,jianghuaw/nova,mahak/nova,mikalstill/nova
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:image-size'
image_size_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return image_size_policies
Add policy description for image size
This commit adds policy doc for image size policies.
Partial implement blueprint policy-docs
Change-Id: I0de4aaa47e21c4e156569eebcb495412ab364417
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:image-size'
image_size_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"""Add 'OS-EXT-IMG-SIZE:size' attribute in the image response.""",
[
{
'method': 'GET',
'path': '/images/{id}'
},
{
'method': 'GET',
'path': '/images/detail'
}
]),
]
def list_rules():
return image_size_policies
|
<commit_before># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:image-size'
image_size_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return image_size_policies
<commit_msg>Add policy description for image size
This commit adds policy doc for image size policies.
Partial implement blueprint policy-docs
Change-Id: I0de4aaa47e21c4e156569eebcb495412ab364417<commit_after>
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:image-size'
image_size_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"""Add 'OS-EXT-IMG-SIZE:size' attribute in the image response.""",
[
{
'method': 'GET',
'path': '/images/{id}'
},
{
'method': 'GET',
'path': '/images/detail'
}
]),
]
def list_rules():
return image_size_policies
|
# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:image-size'
image_size_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return image_size_policies
Add policy description for image size
This commit adds policy doc for image size policies.
Partial implement blueprint policy-docs
Change-Id: I0de4aaa47e21c4e156569eebcb495412ab364417# Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:image-size'
image_size_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"""Add 'OS-EXT-IMG-SIZE:size' attribute in the image response.""",
[
{
'method': 'GET',
'path': '/images/{id}'
},
{
'method': 'GET',
'path': '/images/detail'
}
]),
]
def list_rules():
return image_size_policies
|
<commit_before># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:image-size'
image_size_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return image_size_policies
<commit_msg>Add policy description for image size
This commit adds policy doc for image size policies.
Partial implement blueprint policy-docs
Change-Id: I0de4aaa47e21c4e156569eebcb495412ab364417<commit_after># Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:image-size'
image_size_policies = [
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"""Add 'OS-EXT-IMG-SIZE:size' attribute in the image response.""",
[
{
'method': 'GET',
'path': '/images/{id}'
},
{
'method': 'GET',
'path': '/images/detail'
}
]),
]
def list_rules():
return image_size_policies
|
06db6c3823ae480d0180b747ac475f149b2f8976
|
try_telethon.py
|
try_telethon.py
|
#!/usr/bin/env python3
import traceback
from telethon.interactive_telegram_client import (InteractiveTelegramClient,
print_title)
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=settings.get('session_name', 'anonymous'),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=settings['api_hash'])
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
print_title('Exit')
print('Thanks for trying the interactive example! Exiting...')
client.disconnect()
|
#!/usr/bin/env python3
import traceback
from telethon.interactive_telegram_client import (InteractiveTelegramClient,
print_title)
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']))
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
print_title('Exit')
print('Thanks for trying the interactive example! Exiting...')
client.disconnect()
|
Allow integer-only session name and hash for the example
|
Allow integer-only session name and hash for the example
|
Python
|
mit
|
expectocode/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,andr-04/Telethon,LonamiWebs/Telethon,kyasabu/Telethon
|
#!/usr/bin/env python3
import traceback
from telethon.interactive_telegram_client import (InteractiveTelegramClient,
print_title)
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=settings.get('session_name', 'anonymous'),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=settings['api_hash'])
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
print_title('Exit')
print('Thanks for trying the interactive example! Exiting...')
client.disconnect()
Allow integer-only session name and hash for the example
|
#!/usr/bin/env python3
import traceback
from telethon.interactive_telegram_client import (InteractiveTelegramClient,
print_title)
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']))
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
print_title('Exit')
print('Thanks for trying the interactive example! Exiting...')
client.disconnect()
|
<commit_before>#!/usr/bin/env python3
import traceback
from telethon.interactive_telegram_client import (InteractiveTelegramClient,
print_title)
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=settings.get('session_name', 'anonymous'),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=settings['api_hash'])
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
print_title('Exit')
print('Thanks for trying the interactive example! Exiting...')
client.disconnect()
<commit_msg>Allow integer-only session name and hash for the example<commit_after>
|
#!/usr/bin/env python3
import traceback
from telethon.interactive_telegram_client import (InteractiveTelegramClient,
print_title)
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']))
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
print_title('Exit')
print('Thanks for trying the interactive example! Exiting...')
client.disconnect()
|
#!/usr/bin/env python3
import traceback
from telethon.interactive_telegram_client import (InteractiveTelegramClient,
print_title)
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=settings.get('session_name', 'anonymous'),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=settings['api_hash'])
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
print_title('Exit')
print('Thanks for trying the interactive example! Exiting...')
client.disconnect()
Allow integer-only session name and hash for the example#!/usr/bin/env python3
import traceback
from telethon.interactive_telegram_client import (InteractiveTelegramClient,
print_title)
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']))
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
print_title('Exit')
print('Thanks for trying the interactive example! Exiting...')
client.disconnect()
|
<commit_before>#!/usr/bin/env python3
import traceback
from telethon.interactive_telegram_client import (InteractiveTelegramClient,
print_title)
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=settings.get('session_name', 'anonymous'),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=settings['api_hash'])
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
print_title('Exit')
print('Thanks for trying the interactive example! Exiting...')
client.disconnect()
<commit_msg>Allow integer-only session name and hash for the example<commit_after>#!/usr/bin/env python3
import traceback
from telethon.interactive_telegram_client import (InteractiveTelegramClient,
print_title)
def load_settings(path='api/settings'):
"""Loads the user settings located under `api/`"""
result = {}
with open(path, 'r', encoding='utf-8') as file:
for line in file:
value_pair = line.split('=')
left = value_pair[0].strip()
right = value_pair[1].strip()
if right.isnumeric():
result[left] = int(right)
else:
result[left] = right
return result
if __name__ == '__main__':
# Load the settings and initialize the client
settings = load_settings()
client = InteractiveTelegramClient(
session_user_id=str(settings.get('session_name', 'anonymous')),
user_phone=str(settings['user_phone']),
api_id=settings['api_id'],
api_hash=str(settings['api_hash']))
print('Initialization done!')
try:
client.run()
except Exception as e:
print('Unexpected error ({}): {} at\n{}'.format(
type(e), e, traceback.format_exc()))
finally:
print_title('Exit')
print('Thanks for trying the interactive example! Exiting...')
client.disconnect()
|
12e35b703f548df5e57e44446ddd8739f96aef95
|
tartpy/tools.py
|
tartpy/tools.py
|
import time
from .runtime import behavior, Actor, exception_message, Runtime
from .eventloop import EventLoop
class Wait(object):
"""A synchronizing object.
Convenience object to wait for results outside actors.
Use as::
w = Wait()
wait = runtime.create(w.wait_beh)
# now use `wait` as a customer
msg = w.join()
`msg` will be the message sent back to the customer.
"""
POLL_TIME = 0.01 # seconds
def __init__(self):
self.state = None
@behavior
def wait_beh(self, this, message):
self.state = message
def join(self):
while self.state is None:
time.sleep(self.POLL_TIME)
return self.state
def later(actor, t, msg):
EventLoop().later(t, lambda: actor << msg)
@behavior
def log_beh(self, message):
print('LOG:', message)
|
from collections.abc import Mapping, Sequence
import time
from .runtime import behavior, Actor, exception_message, Runtime
from .eventloop import EventLoop
class Wait(object):
"""A synchronizing object.
Convenience object to wait for results outside actors.
Use as::
w = Wait()
wait = runtime.create(w.wait_beh)
# now use `wait` as a customer
msg = w.join()
`msg` will be the message sent back to the customer.
"""
POLL_TIME = 0.01 # seconds
def __init__(self):
self.state = None
@behavior
def wait_beh(self, this, message):
self.state = message
def join(self):
while self.state is None:
time.sleep(self.POLL_TIME)
return self.state
def later(actor, t, msg):
EventLoop().later(t, lambda: actor << msg)
@behavior
def log_beh(self, message):
print('LOG:', message)
def actor_map(f, message):
"""Map a function f:{Actor} -> B to a message."""
if isinstance(message, Actor):
return f(message)
if isinstance(message, Mapping):
return {actor_map(f, key): actor_map(f, value)
for key, value in message.items()}
if isinstance(message, str):
return message
if isinstance(message, Sequence):
return [actor_map(f, value) for value in message]
return message
|
Add a map function over messages
|
Add a map function over messages
|
Python
|
mit
|
waltermoreira/tartpy
|
import time
from .runtime import behavior, Actor, exception_message, Runtime
from .eventloop import EventLoop
class Wait(object):
"""A synchronizing object.
Convenience object to wait for results outside actors.
Use as::
w = Wait()
wait = runtime.create(w.wait_beh)
# now use `wait` as a customer
msg = w.join()
`msg` will be the message sent back to the customer.
"""
POLL_TIME = 0.01 # seconds
def __init__(self):
self.state = None
@behavior
def wait_beh(self, this, message):
self.state = message
def join(self):
while self.state is None:
time.sleep(self.POLL_TIME)
return self.state
def later(actor, t, msg):
EventLoop().later(t, lambda: actor << msg)
@behavior
def log_beh(self, message):
print('LOG:', message)
Add a map function over messages
|
from collections.abc import Mapping, Sequence
import time
from .runtime import behavior, Actor, exception_message, Runtime
from .eventloop import EventLoop
class Wait(object):
"""A synchronizing object.
Convenience object to wait for results outside actors.
Use as::
w = Wait()
wait = runtime.create(w.wait_beh)
# now use `wait` as a customer
msg = w.join()
`msg` will be the message sent back to the customer.
"""
POLL_TIME = 0.01 # seconds
def __init__(self):
self.state = None
@behavior
def wait_beh(self, this, message):
self.state = message
def join(self):
while self.state is None:
time.sleep(self.POLL_TIME)
return self.state
def later(actor, t, msg):
EventLoop().later(t, lambda: actor << msg)
@behavior
def log_beh(self, message):
print('LOG:', message)
def actor_map(f, message):
"""Map a function f:{Actor} -> B to a message."""
if isinstance(message, Actor):
return f(message)
if isinstance(message, Mapping):
return {actor_map(f, key): actor_map(f, value)
for key, value in message.items()}
if isinstance(message, str):
return message
if isinstance(message, Sequence):
return [actor_map(f, value) for value in message]
return message
|
<commit_before>import time
from .runtime import behavior, Actor, exception_message, Runtime
from .eventloop import EventLoop
class Wait(object):
"""A synchronizing object.
Convenience object to wait for results outside actors.
Use as::
w = Wait()
wait = runtime.create(w.wait_beh)
# now use `wait` as a customer
msg = w.join()
`msg` will be the message sent back to the customer.
"""
POLL_TIME = 0.01 # seconds
def __init__(self):
self.state = None
@behavior
def wait_beh(self, this, message):
self.state = message
def join(self):
while self.state is None:
time.sleep(self.POLL_TIME)
return self.state
def later(actor, t, msg):
EventLoop().later(t, lambda: actor << msg)
@behavior
def log_beh(self, message):
print('LOG:', message)
<commit_msg>Add a map function over messages<commit_after>
|
from collections.abc import Mapping, Sequence
import time
from .runtime import behavior, Actor, exception_message, Runtime
from .eventloop import EventLoop
class Wait(object):
"""A synchronizing object.
Convenience object to wait for results outside actors.
Use as::
w = Wait()
wait = runtime.create(w.wait_beh)
# now use `wait` as a customer
msg = w.join()
`msg` will be the message sent back to the customer.
"""
POLL_TIME = 0.01 # seconds
def __init__(self):
self.state = None
@behavior
def wait_beh(self, this, message):
self.state = message
def join(self):
while self.state is None:
time.sleep(self.POLL_TIME)
return self.state
def later(actor, t, msg):
EventLoop().later(t, lambda: actor << msg)
@behavior
def log_beh(self, message):
print('LOG:', message)
def actor_map(f, message):
"""Map a function f:{Actor} -> B to a message."""
if isinstance(message, Actor):
return f(message)
if isinstance(message, Mapping):
return {actor_map(f, key): actor_map(f, value)
for key, value in message.items()}
if isinstance(message, str):
return message
if isinstance(message, Sequence):
return [actor_map(f, value) for value in message]
return message
|
import time
from .runtime import behavior, Actor, exception_message, Runtime
from .eventloop import EventLoop
class Wait(object):
"""A synchronizing object.
Convenience object to wait for results outside actors.
Use as::
w = Wait()
wait = runtime.create(w.wait_beh)
# now use `wait` as a customer
msg = w.join()
`msg` will be the message sent back to the customer.
"""
POLL_TIME = 0.01 # seconds
def __init__(self):
self.state = None
@behavior
def wait_beh(self, this, message):
self.state = message
def join(self):
while self.state is None:
time.sleep(self.POLL_TIME)
return self.state
def later(actor, t, msg):
EventLoop().later(t, lambda: actor << msg)
@behavior
def log_beh(self, message):
print('LOG:', message)
Add a map function over messagesfrom collections.abc import Mapping, Sequence
import time
from .runtime import behavior, Actor, exception_message, Runtime
from .eventloop import EventLoop
class Wait(object):
"""A synchronizing object.
Convenience object to wait for results outside actors.
Use as::
w = Wait()
wait = runtime.create(w.wait_beh)
# now use `wait` as a customer
msg = w.join()
`msg` will be the message sent back to the customer.
"""
POLL_TIME = 0.01 # seconds
def __init__(self):
self.state = None
@behavior
def wait_beh(self, this, message):
self.state = message
def join(self):
while self.state is None:
time.sleep(self.POLL_TIME)
return self.state
def later(actor, t, msg):
EventLoop().later(t, lambda: actor << msg)
@behavior
def log_beh(self, message):
print('LOG:', message)
def actor_map(f, message):
"""Map a function f:{Actor} -> B to a message."""
if isinstance(message, Actor):
return f(message)
if isinstance(message, Mapping):
return {actor_map(f, key): actor_map(f, value)
for key, value in message.items()}
if isinstance(message, str):
return message
if isinstance(message, Sequence):
return [actor_map(f, value) for value in message]
return message
|
<commit_before>import time
from .runtime import behavior, Actor, exception_message, Runtime
from .eventloop import EventLoop
class Wait(object):
"""A synchronizing object.
Convenience object to wait for results outside actors.
Use as::
w = Wait()
wait = runtime.create(w.wait_beh)
# now use `wait` as a customer
msg = w.join()
`msg` will be the message sent back to the customer.
"""
POLL_TIME = 0.01 # seconds
def __init__(self):
self.state = None
@behavior
def wait_beh(self, this, message):
self.state = message
def join(self):
while self.state is None:
time.sleep(self.POLL_TIME)
return self.state
def later(actor, t, msg):
EventLoop().later(t, lambda: actor << msg)
@behavior
def log_beh(self, message):
print('LOG:', message)
<commit_msg>Add a map function over messages<commit_after>from collections.abc import Mapping, Sequence
import time
from .runtime import behavior, Actor, exception_message, Runtime
from .eventloop import EventLoop
class Wait(object):
"""A synchronizing object.
Convenience object to wait for results outside actors.
Use as::
w = Wait()
wait = runtime.create(w.wait_beh)
# now use `wait` as a customer
msg = w.join()
`msg` will be the message sent back to the customer.
"""
POLL_TIME = 0.01 # seconds
def __init__(self):
self.state = None
@behavior
def wait_beh(self, this, message):
self.state = message
def join(self):
while self.state is None:
time.sleep(self.POLL_TIME)
return self.state
def later(actor, t, msg):
EventLoop().later(t, lambda: actor << msg)
@behavior
def log_beh(self, message):
print('LOG:', message)
def actor_map(f, message):
"""Map a function f:{Actor} -> B to a message."""
if isinstance(message, Actor):
return f(message)
if isinstance(message, Mapping):
return {actor_map(f, key): actor_map(f, value)
for key, value in message.items()}
if isinstance(message, str):
return message
if isinstance(message, Sequence):
return [actor_map(f, value) for value in message]
return message
|
c01f4014d3ccda8a168c6298d05e894e381f1ded
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
# Copyright (c) 2013-2014 Aparajita Fishman
#
# License: MIT
#
"""This module exports the Pydocstyle plugin linter class."""
from SublimeLinter.lint import PythonLinter, highlight, util
class Pydocstyle(PythonLinter):
"""Provides an interface to the pydocstyle python module/script."""
syntax = 'python'
if PythonLinter.which('pydocstyle'):
cmd = 'pydocstyle@python'
else:
cmd = 'pep257@python'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.3.0'
regex = r'^.+?:(?P<line>\d+).*:\r?\n\s*(?P<message>.+)$'
multiline = True
default_type = highlight.WARNING
error_stream = util.STREAM_STDERR
line_col_base = (0, 0) # pydocstyle uses one-based line and zero-based column numbers
tempfile_suffix = 'py'
defaults = {
'--add-ignore=': ''
}
inline_overrides = ('add-ignore')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
# Copyright (c) 2013-2014 Aparajita Fishman
#
# License: MIT
#
"""This module exports the Pydocstyle plugin linter class."""
from SublimeLinter.lint import PythonLinter, highlight, util
class Pydocstyle(PythonLinter):
"""Provides an interface to the pydocstyle python module/script."""
syntax = 'python'
if PythonLinter.which('pydocstyle'):
cmd = 'pydocstyle@python'
else:
cmd = 'pep257@python'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.3.0'
regex = r'^.+?:(?P<line>\d+).*:\r?\n\s*(?P<message>.+)$'
multiline = True
default_type = highlight.WARNING
error_stream = util.STREAM_BOTH
line_col_base = (0, 0) # pydocstyle uses one-based line and zero-based column numbers
tempfile_suffix = 'py'
defaults = {
'--add-ignore=': ''
}
inline_overrides = ('add-ignore')
|
Fix for pydocstyle > 1.1.1
|
Fix for pydocstyle > 1.1.1
|
Python
|
mit
|
SublimeLinter/SublimeLinter-pep257
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
# Copyright (c) 2013-2014 Aparajita Fishman
#
# License: MIT
#
"""This module exports the Pydocstyle plugin linter class."""
from SublimeLinter.lint import PythonLinter, highlight, util
class Pydocstyle(PythonLinter):
"""Provides an interface to the pydocstyle python module/script."""
syntax = 'python'
if PythonLinter.which('pydocstyle'):
cmd = 'pydocstyle@python'
else:
cmd = 'pep257@python'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.3.0'
regex = r'^.+?:(?P<line>\d+).*:\r?\n\s*(?P<message>.+)$'
multiline = True
default_type = highlight.WARNING
error_stream = util.STREAM_STDERR
line_col_base = (0, 0) # pydocstyle uses one-based line and zero-based column numbers
tempfile_suffix = 'py'
defaults = {
'--add-ignore=': ''
}
inline_overrides = ('add-ignore')
Fix for pydocstyle > 1.1.1
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
# Copyright (c) 2013-2014 Aparajita Fishman
#
# License: MIT
#
"""This module exports the Pydocstyle plugin linter class."""
from SublimeLinter.lint import PythonLinter, highlight, util
class Pydocstyle(PythonLinter):
"""Provides an interface to the pydocstyle python module/script."""
syntax = 'python'
if PythonLinter.which('pydocstyle'):
cmd = 'pydocstyle@python'
else:
cmd = 'pep257@python'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.3.0'
regex = r'^.+?:(?P<line>\d+).*:\r?\n\s*(?P<message>.+)$'
multiline = True
default_type = highlight.WARNING
error_stream = util.STREAM_BOTH
line_col_base = (0, 0) # pydocstyle uses one-based line and zero-based column numbers
tempfile_suffix = 'py'
defaults = {
'--add-ignore=': ''
}
inline_overrides = ('add-ignore')
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
# Copyright (c) 2013-2014 Aparajita Fishman
#
# License: MIT
#
"""This module exports the Pydocstyle plugin linter class."""
from SublimeLinter.lint import PythonLinter, highlight, util
class Pydocstyle(PythonLinter):
"""Provides an interface to the pydocstyle python module/script."""
syntax = 'python'
if PythonLinter.which('pydocstyle'):
cmd = 'pydocstyle@python'
else:
cmd = 'pep257@python'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.3.0'
regex = r'^.+?:(?P<line>\d+).*:\r?\n\s*(?P<message>.+)$'
multiline = True
default_type = highlight.WARNING
error_stream = util.STREAM_STDERR
line_col_base = (0, 0) # pydocstyle uses one-based line and zero-based column numbers
tempfile_suffix = 'py'
defaults = {
'--add-ignore=': ''
}
inline_overrides = ('add-ignore')
<commit_msg>Fix for pydocstyle > 1.1.1<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
# Copyright (c) 2013-2014 Aparajita Fishman
#
# License: MIT
#
"""This module exports the Pydocstyle plugin linter class."""
from SublimeLinter.lint import PythonLinter, highlight, util
class Pydocstyle(PythonLinter):
"""Provides an interface to the pydocstyle python module/script."""
syntax = 'python'
if PythonLinter.which('pydocstyle'):
cmd = 'pydocstyle@python'
else:
cmd = 'pep257@python'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.3.0'
regex = r'^.+?:(?P<line>\d+).*:\r?\n\s*(?P<message>.+)$'
multiline = True
default_type = highlight.WARNING
error_stream = util.STREAM_BOTH
line_col_base = (0, 0) # pydocstyle uses one-based line and zero-based column numbers
tempfile_suffix = 'py'
defaults = {
'--add-ignore=': ''
}
inline_overrides = ('add-ignore')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
# Copyright (c) 2013-2014 Aparajita Fishman
#
# License: MIT
#
"""This module exports the Pydocstyle plugin linter class."""
from SublimeLinter.lint import PythonLinter, highlight, util
class Pydocstyle(PythonLinter):
"""Provides an interface to the pydocstyle python module/script."""
syntax = 'python'
if PythonLinter.which('pydocstyle'):
cmd = 'pydocstyle@python'
else:
cmd = 'pep257@python'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.3.0'
regex = r'^.+?:(?P<line>\d+).*:\r?\n\s*(?P<message>.+)$'
multiline = True
default_type = highlight.WARNING
error_stream = util.STREAM_STDERR
line_col_base = (0, 0) # pydocstyle uses one-based line and zero-based column numbers
tempfile_suffix = 'py'
defaults = {
'--add-ignore=': ''
}
inline_overrides = ('add-ignore')
Fix for pydocstyle > 1.1.1#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
# Copyright (c) 2013-2014 Aparajita Fishman
#
# License: MIT
#
"""This module exports the Pydocstyle plugin linter class."""
from SublimeLinter.lint import PythonLinter, highlight, util
class Pydocstyle(PythonLinter):
"""Provides an interface to the pydocstyle python module/script."""
syntax = 'python'
if PythonLinter.which('pydocstyle'):
cmd = 'pydocstyle@python'
else:
cmd = 'pep257@python'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.3.0'
regex = r'^.+?:(?P<line>\d+).*:\r?\n\s*(?P<message>.+)$'
multiline = True
default_type = highlight.WARNING
error_stream = util.STREAM_BOTH
line_col_base = (0, 0) # pydocstyle uses one-based line and zero-based column numbers
tempfile_suffix = 'py'
defaults = {
'--add-ignore=': ''
}
inline_overrides = ('add-ignore')
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
# Copyright (c) 2013-2014 Aparajita Fishman
#
# License: MIT
#
"""This module exports the Pydocstyle plugin linter class."""
from SublimeLinter.lint import PythonLinter, highlight, util
class Pydocstyle(PythonLinter):
"""Provides an interface to the pydocstyle python module/script."""
syntax = 'python'
if PythonLinter.which('pydocstyle'):
cmd = 'pydocstyle@python'
else:
cmd = 'pep257@python'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.3.0'
regex = r'^.+?:(?P<line>\d+).*:\r?\n\s*(?P<message>.+)$'
multiline = True
default_type = highlight.WARNING
error_stream = util.STREAM_STDERR
line_col_base = (0, 0) # pydocstyle uses one-based line and zero-based column numbers
tempfile_suffix = 'py'
defaults = {
'--add-ignore=': ''
}
inline_overrides = ('add-ignore')
<commit_msg>Fix for pydocstyle > 1.1.1<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2015-2016 The SublimeLinter Community
# Copyright (c) 2013-2014 Aparajita Fishman
#
# License: MIT
#
"""This module exports the Pydocstyle plugin linter class."""
from SublimeLinter.lint import PythonLinter, highlight, util
class Pydocstyle(PythonLinter):
"""Provides an interface to the pydocstyle python module/script."""
syntax = 'python'
if PythonLinter.which('pydocstyle'):
cmd = 'pydocstyle@python'
else:
cmd = 'pep257@python'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 0.3.0'
regex = r'^.+?:(?P<line>\d+).*:\r?\n\s*(?P<message>.+)$'
multiline = True
default_type = highlight.WARNING
error_stream = util.STREAM_BOTH
line_col_base = (0, 0) # pydocstyle uses one-based line and zero-based column numbers
tempfile_suffix = 'py'
defaults = {
'--add-ignore=': ''
}
inline_overrides = ('add-ignore')
|
d37dc009f1c4f6e8855657dd6dbf17df9332f765
|
test/os_win7.py
|
test/os_win7.py
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
|
Add test for mbed parsing
|
Add test for mbed parsing
|
Python
|
apache-2.0
|
jupe/mbed-ls,jupe/mbed-ls,mazimkhan/mbed-ls,mtmtech/mbed-ls,mazimkhan/mbed-ls,mtmtech/mbed-ls
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
Add test for mbed parsing
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for mbed parsing<commit_after>
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
Add test for mbed parsing#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for mbed parsing<commit_after>#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
|
55fface07089b32a4d39d2e9a6d7deeb9ef0a23e
|
mefdas.py
|
mefdas.py
|
#!/usr/bin/env python
"""mefdas.py: Description of what foobar does."""
__author__ = "Philip Chase(pbc@ufl.edu, Chris Barnes(cpb@ufl.edu), Roy Keyes (keyes@ufl.edu), Alex Loiacono (atloiaco@ufl.edu)"
__copyright__ = "Copyright 2015, CTS-IT University of Florida"
class fuzzyMeasure:
'''A class to produce a fuzzy measure of based on a list of criteria'''
def __init__(self, number_of_criteria):
# build a data structure to hold all possible subsets of a set of size = number_of_criteria
self.data = []
def set_fm_for_trivial_cases(self):
# set fuzzyMeasure for empty and complete sets
def set_fm_for_singleton_sets(self):
# set fuzzyMeasure for sets with exactly one member
def set_fm_for_complex_sets(self):
# set fuzzyMeasure for sets with 2 or more members
if __name__ == "__main__":
import sys
mefdas(int(sys.argv[1]))
|
#!/usr/bin/env python
"""mefdas.py: Description of what foobar does."""
__author__ = "Philip Chase(pbc@ufl.edu, Chris Barnes(cpb@ufl.edu), Roy Keyes (keyes@ufl.edu), Alex Loiacono (atloiaco@ufl.edu)"
__copyright__ = "Copyright 2015, CTS-IT University of Florida"
class fuzzyMeasure:
'''A class to produce a fuzzy measure of based on a list of criteria'''
def __init__(self, number_of_criteria):
# build a data structure to hold all possible subsets of a set of size = number_of_criteria
self.data = []
def set_fm_for_trivial_cases(self):
# set fuzzyMeasure for empty and complete sets
# mu(∅) := 0
# mu(X) := 1
def set_fm_for_singleton_sets(self):
# set fuzzyMeasure for sets with exactly one member
def set_fm_for_complex_sets(self):
# set fuzzyMeasure for sets with 2 or more members
# Random generation of a fuzzy measure mu on a set X
# note: 'undefined' means we have not yet calculated and stored the value of mu for mu(foo)
# copy list of subsets X to my_x
# for each A popped randomly from my_x:
# if mu(A) is undefined:
# min := 0
# max := 1
# for each B in X:
# case B ⊂ A :
# if mu(B) is defined:
# mu(B) = max(mu(B), min)
# case B ⊃ A :
# if mu(B) is defined:
# mu(B) = min(max, mu(B))
# else:
# do nothing
# mu(A) := random value between min and max
if __name__ == "__main__":
import sys
mefdas(int(sys.argv[1]))
|
Add pseudocode for set_fm_for_complex_sets and set_fm_for_trivial_cases
|
Add pseudocode for set_fm_for_complex_sets and set_fm_for_trivial_cases
|
Python
|
apache-2.0
|
ctsit/mdat,indera/mdat
|
#!/usr/bin/env python
"""mefdas.py: Description of what foobar does."""
__author__ = "Philip Chase(pbc@ufl.edu, Chris Barnes(cpb@ufl.edu), Roy Keyes (keyes@ufl.edu), Alex Loiacono (atloiaco@ufl.edu)"
__copyright__ = "Copyright 2015, CTS-IT University of Florida"
class fuzzyMeasure:
'''A class to produce a fuzzy measure of based on a list of criteria'''
def __init__(self, number_of_criteria):
# build a data structure to hold all possible subsets of a set of size = number_of_criteria
self.data = []
def set_fm_for_trivial_cases(self):
# set fuzzyMeasure for empty and complete sets
def set_fm_for_singleton_sets(self):
# set fuzzyMeasure for sets with exactly one member
def set_fm_for_complex_sets(self):
# set fuzzyMeasure for sets with 2 or more members
if __name__ == "__main__":
import sys
mefdas(int(sys.argv[1]))Add pseudocode for set_fm_for_complex_sets and set_fm_for_trivial_cases
|
#!/usr/bin/env python
"""mefdas.py: Description of what foobar does."""
__author__ = "Philip Chase(pbc@ufl.edu, Chris Barnes(cpb@ufl.edu), Roy Keyes (keyes@ufl.edu), Alex Loiacono (atloiaco@ufl.edu)"
__copyright__ = "Copyright 2015, CTS-IT University of Florida"
class fuzzyMeasure:
'''A class to produce a fuzzy measure of based on a list of criteria'''
def __init__(self, number_of_criteria):
# build a data structure to hold all possible subsets of a set of size = number_of_criteria
self.data = []
def set_fm_for_trivial_cases(self):
# set fuzzyMeasure for empty and complete sets
# mu(∅) := 0
# mu(X) := 1
def set_fm_for_singleton_sets(self):
# set fuzzyMeasure for sets with exactly one member
def set_fm_for_complex_sets(self):
# set fuzzyMeasure for sets with 2 or more members
# Random generation of a fuzzy measure mu on a set X
# note: 'undefined' means we have not yet calculated and stored the value of mu for mu(foo)
# copy list of subsets X to my_x
# for each A popped randomly from my_x:
# if mu(A) is undefined:
# min := 0
# max := 1
# for each B in X:
# case B ⊂ A :
# if mu(B) is defined:
# mu(B) = max(mu(B), min)
# case B ⊃ A :
# if mu(B) is defined:
# mu(B) = min(max, mu(B))
# else:
# do nothing
# mu(A) := random value between min and max
if __name__ == "__main__":
import sys
mefdas(int(sys.argv[1]))
|
<commit_before>
#!/usr/bin/env python
"""mefdas.py: Description of what foobar does."""
__author__ = "Philip Chase(pbc@ufl.edu, Chris Barnes(cpb@ufl.edu), Roy Keyes (keyes@ufl.edu), Alex Loiacono (atloiaco@ufl.edu)"
__copyright__ = "Copyright 2015, CTS-IT University of Florida"
class fuzzyMeasure:
'''A class to produce a fuzzy measure of based on a list of criteria'''
def __init__(self, number_of_criteria):
# build a data structure to hold all possible subsets of a set of size = number_of_criteria
self.data = []
def set_fm_for_trivial_cases(self):
# set fuzzyMeasure for empty and complete sets
def set_fm_for_singleton_sets(self):
# set fuzzyMeasure for sets with exactly one member
def set_fm_for_complex_sets(self):
# set fuzzyMeasure for sets with 2 or more members
if __name__ == "__main__":
import sys
mefdas(int(sys.argv[1]))<commit_msg>Add pseudocode for set_fm_for_complex_sets and set_fm_for_trivial_cases<commit_after>
|
#!/usr/bin/env python
"""mefdas.py: Description of what foobar does."""
__author__ = "Philip Chase(pbc@ufl.edu, Chris Barnes(cpb@ufl.edu), Roy Keyes (keyes@ufl.edu), Alex Loiacono (atloiaco@ufl.edu)"
__copyright__ = "Copyright 2015, CTS-IT University of Florida"
class fuzzyMeasure:
'''A class to produce a fuzzy measure of based on a list of criteria'''
def __init__(self, number_of_criteria):
# build a data structure to hold all possible subsets of a set of size = number_of_criteria
self.data = []
def set_fm_for_trivial_cases(self):
# set fuzzyMeasure for empty and complete sets
# mu(∅) := 0
# mu(X) := 1
def set_fm_for_singleton_sets(self):
# set fuzzyMeasure for sets with exactly one member
def set_fm_for_complex_sets(self):
# set fuzzyMeasure for sets with 2 or more members
# Random generation of a fuzzy measure mu on a set X
# note: 'undefined' means we have not yet calculated and stored the value of mu for mu(foo)
# copy list of subsets X to my_x
# for each A popped randomly from my_x:
# if mu(A) is undefined:
# min := 0
# max := 1
# for each B in X:
# case B ⊂ A :
# if mu(B) is defined:
# mu(B) = max(mu(B), min)
# case B ⊃ A :
# if mu(B) is defined:
# mu(B) = min(max, mu(B))
# else:
# do nothing
# mu(A) := random value between min and max
if __name__ == "__main__":
import sys
mefdas(int(sys.argv[1]))
|
#!/usr/bin/env python
"""mefdas.py: Description of what foobar does."""
__author__ = "Philip Chase(pbc@ufl.edu, Chris Barnes(cpb@ufl.edu), Roy Keyes (keyes@ufl.edu), Alex Loiacono (atloiaco@ufl.edu)"
__copyright__ = "Copyright 2015, CTS-IT University of Florida"
class fuzzyMeasure:
'''A class to produce a fuzzy measure of based on a list of criteria'''
def __init__(self, number_of_criteria):
# build a data structure to hold all possible subsets of a set of size = number_of_criteria
self.data = []
def set_fm_for_trivial_cases(self):
# set fuzzyMeasure for empty and complete sets
def set_fm_for_singleton_sets(self):
# set fuzzyMeasure for sets with exactly one member
def set_fm_for_complex_sets(self):
# set fuzzyMeasure for sets with 2 or more members
if __name__ == "__main__":
import sys
mefdas(int(sys.argv[1]))Add pseudocode for set_fm_for_complex_sets and set_fm_for_trivial_cases
#!/usr/bin/env python
"""mefdas.py: Description of what foobar does."""
__author__ = "Philip Chase(pbc@ufl.edu, Chris Barnes(cpb@ufl.edu), Roy Keyes (keyes@ufl.edu), Alex Loiacono (atloiaco@ufl.edu)"
__copyright__ = "Copyright 2015, CTS-IT University of Florida"
class fuzzyMeasure:
'''A class to produce a fuzzy measure of based on a list of criteria'''
def __init__(self, number_of_criteria):
# build a data structure to hold all possible subsets of a set of size = number_of_criteria
self.data = []
def set_fm_for_trivial_cases(self):
# set fuzzyMeasure for empty and complete sets
# mu(∅) := 0
# mu(X) := 1
def set_fm_for_singleton_sets(self):
# set fuzzyMeasure for sets with exactly one member
def set_fm_for_complex_sets(self):
# set fuzzyMeasure for sets with 2 or more members
# Random generation of a fuzzy measure mu on a set X
# note: 'undefined' means we have not yet calculated and stored the value of mu for mu(foo)
# copy list of subsets X to my_x
# for each A popped randomly from my_x:
# if mu(A) is undefined:
# min := 0
# max := 1
# for each B in X:
# case B ⊂ A :
# if mu(B) is defined:
# mu(B) = max(mu(B), min)
# case B ⊃ A :
# if mu(B) is defined:
# mu(B) = min(max, mu(B))
# else:
# do nothing
# mu(A) := random value between min and max
if __name__ == "__main__":
import sys
mefdas(int(sys.argv[1]))
|
<commit_before>
#!/usr/bin/env python
"""mefdas.py: Description of what foobar does."""
__author__ = "Philip Chase(pbc@ufl.edu, Chris Barnes(cpb@ufl.edu), Roy Keyes (keyes@ufl.edu), Alex Loiacono (atloiaco@ufl.edu)"
__copyright__ = "Copyright 2015, CTS-IT University of Florida"
class fuzzyMeasure:
'''A class to produce a fuzzy measure of based on a list of criteria'''
def __init__(self, number_of_criteria):
# build a data structure to hold all possible subsets of a set of size = number_of_criteria
self.data = []
def set_fm_for_trivial_cases(self):
# set fuzzyMeasure for empty and complete sets
def set_fm_for_singleton_sets(self):
# set fuzzyMeasure for sets with exactly one member
def set_fm_for_complex_sets(self):
# set fuzzyMeasure for sets with 2 or more members
if __name__ == "__main__":
import sys
mefdas(int(sys.argv[1]))<commit_msg>Add pseudocode for set_fm_for_complex_sets and set_fm_for_trivial_cases<commit_after>
#!/usr/bin/env python
"""mefdas.py: Description of what foobar does."""
__author__ = "Philip Chase(pbc@ufl.edu, Chris Barnes(cpb@ufl.edu), Roy Keyes (keyes@ufl.edu), Alex Loiacono (atloiaco@ufl.edu)"
__copyright__ = "Copyright 2015, CTS-IT University of Florida"
class fuzzyMeasure:
'''A class to produce a fuzzy measure of based on a list of criteria'''
def __init__(self, number_of_criteria):
# build a data structure to hold all possible subsets of a set of size = number_of_criteria
self.data = []
def set_fm_for_trivial_cases(self):
# set fuzzyMeasure for empty and complete sets
# mu(∅) := 0
# mu(X) := 1
def set_fm_for_singleton_sets(self):
# set fuzzyMeasure for sets with exactly one member
def set_fm_for_complex_sets(self):
# set fuzzyMeasure for sets with 2 or more members
# Random generation of a fuzzy measure mu on a set X
# note: 'undefined' means we have not yet calculated and stored the value of mu for mu(foo)
# copy list of subsets X to my_x
# for each A popped randomly from my_x:
# if mu(A) is undefined:
# min := 0
# max := 1
# for each B in X:
# case B ⊂ A :
# if mu(B) is defined:
# mu(B) = max(mu(B), min)
# case B ⊃ A :
# if mu(B) is defined:
# mu(B) = min(max, mu(B))
# else:
# do nothing
# mu(A) := random value between min and max
if __name__ == "__main__":
import sys
mefdas(int(sys.argv[1]))
|
d561796c812bfdde822380851a0583db8726b798
|
hooks/post_gen_project.py
|
hooks/post_gen_project.py
|
import os
src = '{{cookiecutter.repo_name}}/src/utils/prepare-commit-msg.py'
dst = '{{cookiecutter.repo_name}}/.git/hooks/prepare-commit-msg'
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
|
import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
hooks_dir = os.path.join(project_dir, '.git/hooks')
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(hooks_dir, 'prepare-commit-msg')
process = subprocess.Popen('git', 'init', project_dir)
process.wait()
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
|
Add git init to post generate hook
|
Add git init to post generate hook
|
Python
|
mit
|
Empiria/matador-cookiecutter
|
import os
src = '{{cookiecutter.repo_name}}/src/utils/prepare-commit-msg.py'
dst = '{{cookiecutter.repo_name}}/.git/hooks/prepare-commit-msg'
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
Add git init to post generate hook
|
import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
hooks_dir = os.path.join(project_dir, '.git/hooks')
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(hooks_dir, 'prepare-commit-msg')
process = subprocess.Popen('git', 'init', project_dir)
process.wait()
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
|
<commit_before>import os
src = '{{cookiecutter.repo_name}}/src/utils/prepare-commit-msg.py'
dst = '{{cookiecutter.repo_name}}/.git/hooks/prepare-commit-msg'
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
<commit_msg>Add git init to post generate hook<commit_after>
|
import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
hooks_dir = os.path.join(project_dir, '.git/hooks')
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(hooks_dir, 'prepare-commit-msg')
process = subprocess.Popen('git', 'init', project_dir)
process.wait()
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
|
import os
src = '{{cookiecutter.repo_name}}/src/utils/prepare-commit-msg.py'
dst = '{{cookiecutter.repo_name}}/.git/hooks/prepare-commit-msg'
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
Add git init to post generate hookimport os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
hooks_dir = os.path.join(project_dir, '.git/hooks')
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(hooks_dir, 'prepare-commit-msg')
process = subprocess.Popen('git', 'init', project_dir)
process.wait()
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
|
<commit_before>import os
src = '{{cookiecutter.repo_name}}/src/utils/prepare-commit-msg.py'
dst = '{{cookiecutter.repo_name}}/.git/hooks/prepare-commit-msg'
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
<commit_msg>Add git init to post generate hook<commit_after>import os
import subprocess
project_dir = '{{cookiecutter.repo_name}}'
hooks_dir = os.path.join(project_dir, '.git/hooks')
src = os.path.join(project_dir, 'src/utils/prepare-commit-msg.py')
dst = os.path.join(hooks_dir, 'prepare-commit-msg')
process = subprocess.Popen('git', 'init', project_dir)
process.wait()
os.mkdir('{{cookiecutter.repo_name}}/.git/hooks')
os.symlink(src, dst)
|
347853290ebc4f5c47430ffce7d603eb4fead2d9
|
cpt/test/integration/update_python_reqs_test.py
|
cpt/test/integration/update_python_reqs_test.py
|
import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
|
import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/", client.out)
self.assertIn(": 123,234", client.out)
|
Fix pyreq test on Windows
|
Fix pyreq test on Windows
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>
|
Python
|
mit
|
conan-io/conan-package-tools
|
import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
Fix pyreq test on Windows
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>
|
import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/", client.out)
self.assertIn(": 123,234", client.out)
|
<commit_before>import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
<commit_msg>Fix pyreq test on Windows
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com><commit_after>
|
import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/", client.out)
self.assertIn(": 123,234", client.out)
|
import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
Fix pyreq test on Windows
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/", client.out)
self.assertIn(": 123,234", client.out)
|
<commit_before>import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
<commit_msg>Fix pyreq test on Windows
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com><commit_after>import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/", client.out)
self.assertIn(": 123,234", client.out)
|
54faa9a6023cb412bfd34582a6911b1c8eda79e3
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""This module exports the Pyyaml plugin class."""
from SublimeLinter.lint import PythonLinter, persist
class Pyyaml(PythonLinter):
"""Provides an interface to pyyaml."""
syntax = 'yaml'
cmd = None
regex = r'^:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
line_col_base = (0, 0) # the lines and columns are 0-based
module = 'yaml'
def check(self, code, filename):
"""
Call directly the yaml module, and handles the exception. Return str.
Very similar to the SublimeLinter-json linter, except yaml is not in the python core library.
"""
yaml = self.module
try:
yaml.safe_load(code)
except yaml.error.YAMLError as exc:
if persist.settings.get('debug'):
persist.printf('{} - {} : {}'.format(self.name, type(exc), exc))
message = '{} : {} {}'.format(type(exc).__name__, exc.problem, exc.context)
return ':{}:{}: {}\n'.format(exc.problem_mark.line, exc.problem_mark.column, message)
except Exception as exc:
persist.printf('{} - uncaught exception - {} : {}'.format(self.name, type(exc), exc))
return ''
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""This module exports the Pyyaml plugin class."""
from SublimeLinter.lint import PythonLinter, persist
class Pyyaml(PythonLinter):
"""Provides an interface to pyyaml."""
syntax = 'yaml'
cmd = None
regex = r'^:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
line_col_base = (0, 0) # the lines and columns are 0-based
module = 'yaml'
def check(self, code, filename):
"""
Call directly the yaml module, and handles the exception. Return str.
Very similar to the SublimeLinter-json linter, except yaml is not in the python core library.
"""
yaml = self.module
try:
for x in yaml.safe_load_all(code):
# exhausting generator so all documents are checked
pass
except yaml.error.YAMLError as exc:
if persist.settings.get('debug'):
persist.printf('{} - {} : {}'.format(self.name, type(exc), exc))
message = '{} : {} {}'.format(type(exc).__name__, exc.problem, exc.context)
return ':{}:{}: {}\n'.format(exc.problem_mark.line, exc.problem_mark.column, message)
except Exception as exc:
persist.printf('{} - uncaught exception - {} : {}'.format(self.name, type(exc), exc))
return ''
|
Use safe_load_all to detect errors in multi-document files
|
Use safe_load_all to detect errors in multi-document files
|
Python
|
mit
|
pheanex/SublimeLinter-pyyaml,SublimeLinter/SublimeLinter-pyyaml
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""This module exports the Pyyaml plugin class."""
from SublimeLinter.lint import PythonLinter, persist
class Pyyaml(PythonLinter):
"""Provides an interface to pyyaml."""
syntax = 'yaml'
cmd = None
regex = r'^:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
line_col_base = (0, 0) # the lines and columns are 0-based
module = 'yaml'
def check(self, code, filename):
"""
Call directly the yaml module, and handles the exception. Return str.
Very similar to the SublimeLinter-json linter, except yaml is not in the python core library.
"""
yaml = self.module
try:
yaml.safe_load(code)
except yaml.error.YAMLError as exc:
if persist.settings.get('debug'):
persist.printf('{} - {} : {}'.format(self.name, type(exc), exc))
message = '{} : {} {}'.format(type(exc).__name__, exc.problem, exc.context)
return ':{}:{}: {}\n'.format(exc.problem_mark.line, exc.problem_mark.column, message)
except Exception as exc:
persist.printf('{} - uncaught exception - {} : {}'.format(self.name, type(exc), exc))
return ''
Use safe_load_all to detect errors in multi-document files
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""This module exports the Pyyaml plugin class."""
from SublimeLinter.lint import PythonLinter, persist
class Pyyaml(PythonLinter):
"""Provides an interface to pyyaml."""
syntax = 'yaml'
cmd = None
regex = r'^:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
line_col_base = (0, 0) # the lines and columns are 0-based
module = 'yaml'
def check(self, code, filename):
"""
Call directly the yaml module, and handles the exception. Return str.
Very similar to the SublimeLinter-json linter, except yaml is not in the python core library.
"""
yaml = self.module
try:
for x in yaml.safe_load_all(code):
# exhausting generator so all documents are checked
pass
except yaml.error.YAMLError as exc:
if persist.settings.get('debug'):
persist.printf('{} - {} : {}'.format(self.name, type(exc), exc))
message = '{} : {} {}'.format(type(exc).__name__, exc.problem, exc.context)
return ':{}:{}: {}\n'.format(exc.problem_mark.line, exc.problem_mark.column, message)
except Exception as exc:
persist.printf('{} - uncaught exception - {} : {}'.format(self.name, type(exc), exc))
return ''
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""This module exports the Pyyaml plugin class."""
from SublimeLinter.lint import PythonLinter, persist
class Pyyaml(PythonLinter):
"""Provides an interface to pyyaml."""
syntax = 'yaml'
cmd = None
regex = r'^:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
line_col_base = (0, 0) # the lines and columns are 0-based
module = 'yaml'
def check(self, code, filename):
"""
Call directly the yaml module, and handles the exception. Return str.
Very similar to the SublimeLinter-json linter, except yaml is not in the python core library.
"""
yaml = self.module
try:
yaml.safe_load(code)
except yaml.error.YAMLError as exc:
if persist.settings.get('debug'):
persist.printf('{} - {} : {}'.format(self.name, type(exc), exc))
message = '{} : {} {}'.format(type(exc).__name__, exc.problem, exc.context)
return ':{}:{}: {}\n'.format(exc.problem_mark.line, exc.problem_mark.column, message)
except Exception as exc:
persist.printf('{} - uncaught exception - {} : {}'.format(self.name, type(exc), exc))
return ''
<commit_msg>Use safe_load_all to detect errors in multi-document files<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""This module exports the Pyyaml plugin class."""
from SublimeLinter.lint import PythonLinter, persist
class Pyyaml(PythonLinter):
    """Provides an interface to pyyaml."""
    # No external executable: cmd is None and check() parses in-process.
    syntax = 'yaml'
    cmd = None
    # Matches the ':line:col: message' lines produced by check() below.
    regex = r'^:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
    line_col_base = (0, 0)  # the lines and columns are 0-based
    module = 'yaml'  # SublimeLinter imports this module into self.module
    def check(self, code, filename):
        """
        Parse *code* with the yaml module and return errors as a str.

        Very similar to the SublimeLinter-json linter, except yaml is not in the python core library.
        """
        yaml = self.module
        try:
            # safe_load_all handles multi-document streams; it is lazy,
            # so the loop exhausts it to parse every document.
            for x in yaml.safe_load_all(code):
                # exhausting generator so all documents are checked
                pass
        except yaml.error.YAMLError as exc:
            if persist.settings.get('debug'):
                persist.printf('{} - {} : {}'.format(self.name, type(exc), exc))
            # NOTE(review): exc.context may be None and renders as 'None' — confirm acceptable.
            message = '{} : {} {}'.format(type(exc).__name__, exc.problem, exc.context)
            return ':{}:{}: {}\n'.format(exc.problem_mark.line, exc.problem_mark.column, message)
        except Exception as exc:
            persist.printf('{} - uncaught exception - {} : {}'.format(self.name, type(exc), exc))
        return ''
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""This module exports the Pyyaml plugin class."""
from SublimeLinter.lint import PythonLinter, persist
class Pyyaml(PythonLinter):
"""Provides an interface to pyyaml."""
syntax = 'yaml'
cmd = None
regex = r'^:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
line_col_base = (0, 0) # the lines and columns are 0-based
module = 'yaml'
def check(self, code, filename):
"""
Call directly the yaml module, and handles the exception. Return str.
Very similar to the SublimeLinter-json linter, except yaml is not in the python core library.
"""
yaml = self.module
try:
yaml.safe_load(code)
except yaml.error.YAMLError as exc:
if persist.settings.get('debug'):
persist.printf('{} - {} : {}'.format(self.name, type(exc), exc))
message = '{} : {} {}'.format(type(exc).__name__, exc.problem, exc.context)
return ':{}:{}: {}\n'.format(exc.problem_mark.line, exc.problem_mark.column, message)
except Exception as exc:
persist.printf('{} - uncaught exception - {} : {}'.format(self.name, type(exc), exc))
return ''
Use safe_load_all to detect errors in multi-document files#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""This module exports the Pyyaml plugin class."""
from SublimeLinter.lint import PythonLinter, persist
class Pyyaml(PythonLinter):
"""Provides an interface to pyyaml."""
syntax = 'yaml'
cmd = None
regex = r'^:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
line_col_base = (0, 0) # the lines and columns are 0-based
module = 'yaml'
def check(self, code, filename):
"""
Call directly the yaml module, and handles the exception. Return str.
Very similar to the SublimeLinter-json linter, except yaml is not in the python core library.
"""
yaml = self.module
try:
for x in yaml.safe_load_all(code):
# exhausting generator so all documents are checked
pass
except yaml.error.YAMLError as exc:
if persist.settings.get('debug'):
persist.printf('{} - {} : {}'.format(self.name, type(exc), exc))
message = '{} : {} {}'.format(type(exc).__name__, exc.problem, exc.context)
return ':{}:{}: {}\n'.format(exc.problem_mark.line, exc.problem_mark.column, message)
except Exception as exc:
persist.printf('{} - uncaught exception - {} : {}'.format(self.name, type(exc), exc))
return ''
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""This module exports the Pyyaml plugin class."""
from SublimeLinter.lint import PythonLinter, persist
class Pyyaml(PythonLinter):
"""Provides an interface to pyyaml."""
syntax = 'yaml'
cmd = None
regex = r'^:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
line_col_base = (0, 0) # the lines and columns are 0-based
module = 'yaml'
def check(self, code, filename):
"""
Call directly the yaml module, and handles the exception. Return str.
Very similar to the SublimeLinter-json linter, except yaml is not in the python core library.
"""
yaml = self.module
try:
yaml.safe_load(code)
except yaml.error.YAMLError as exc:
if persist.settings.get('debug'):
persist.printf('{} - {} : {}'.format(self.name, type(exc), exc))
message = '{} : {} {}'.format(type(exc).__name__, exc.problem, exc.context)
return ':{}:{}: {}\n'.format(exc.problem_mark.line, exc.problem_mark.column, message)
except Exception as exc:
persist.printf('{} - uncaught exception - {} : {}'.format(self.name, type(exc), exc))
return ''
<commit_msg>Use safe_load_all to detect errors in multi-document files<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by NotSqrt
# Copyright (c) 2013 NotSqrt
#
# License: MIT
#
"""This module exports the Pyyaml plugin class."""
from SublimeLinter.lint import PythonLinter, persist
class Pyyaml(PythonLinter):
"""Provides an interface to pyyaml."""
syntax = 'yaml'
cmd = None
regex = r'^:(?P<line>\d+):(?P<col>\d+): (?P<message>.+)'
line_col_base = (0, 0) # the lines and columns are 0-based
module = 'yaml'
def check(self, code, filename):
"""
Call directly the yaml module, and handles the exception. Return str.
Very similar to the SublimeLinter-json linter, except yaml is not in the python core library.
"""
yaml = self.module
try:
for x in yaml.safe_load_all(code):
# exhausting generator so all documents are checked
pass
except yaml.error.YAMLError as exc:
if persist.settings.get('debug'):
persist.printf('{} - {} : {}'.format(self.name, type(exc), exc))
message = '{} : {} {}'.format(type(exc).__name__, exc.problem, exc.context)
return ':{}:{}: {}\n'.format(exc.problem_mark.line, exc.problem_mark.column, message)
except Exception as exc:
persist.printf('{} - uncaught exception - {} : {}'.format(self.name, type(exc), exc))
return ''
|
5aa54a94929354910d190b9b37f895d0416d7361
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Clifton Kaznocha
# Copyright (c) 2014 Clifton Kaznocha
#
# License: MIT
#
"""This module exports the WriteGood plugin class."""
import SublimeLinter
from SublimeLinter.lint import NodeLinter
if getattr(SublimeLinter.lint, 'VERSION', 3) > 3:
from SublimeLinter.lint import const
WARNING = const.WARNING
else:
from SublimeLinter.lint import highlight
WARNING = highlight.WARNING
class WriteGood(NodeLinter):
    """Provides an interface to write-good."""

    cmd = ('write-good')  # resolved via node_modules by NodeLinter
    npm_name = 'write-good'
    version_args = '--version'
    version_re = r'(?P<version>\d+\.\d+\.\d+)'
    version_requirement = ">=0.9.0"
    # write-good output, e.g.: "really" can weaken meaning on line 3 at column 7
    regex = r'''(?xi)
        ^(?P<message>(?P<near>"[^"]*").*)\son\sline\s(?P<line>\d+)\sat\scolumn\s\d+$
    '''
    multiline = True
    default_type = WARNING  # prose advice, not hard errors
    # BUG FIX: the SL3 'syntax'/'selectors' attributes were removed in
    # SublimeLinter 4 (the VERSION branch above already targets it);
    # scope selection now lives in defaults["selector"].
    defaults = {
        "selector": 'text.html.markdown, text.plain, text.tex.latex, comment'
    }
    tempfile_suffix = '.tmp'
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Clifton Kaznocha
# Copyright (c) 2014 Clifton Kaznocha
#
# License: MIT
#
"""This module exports the WriteGood plugin class."""
import SublimeLinter
from SublimeLinter.lint import NodeLinter
if getattr(SublimeLinter.lint, 'VERSION', 3) > 3:
from SublimeLinter.lint import const
WARNING = const.WARNING
else:
from SublimeLinter.lint import highlight
WARNING = highlight.WARNING
class WriteGood(NodeLinter):
    """Provides an interface to write-good."""
    cmd = ('write-good')  # resolved via node_modules by NodeLinter
    npm_name = 'write-good'
    version_args = '--version'
    version_re = r'(?P<version>\d+\.\d+\.\d+)'
    version_requirement = ">=0.9.0"
    # write-good output, e.g.: "really" can weaken meaning on line 3 at column 7
    regex = r'''(?xi)
        ^(?P<message>(?P<near>"[^"]*").*)\son\sline\s(?P<line>\d+)\sat\scolumn\s\d+$
    '''
    multiline = True
    default_type = WARNING  # prose advice, not hard errors
    # SublimeLinter 4 scope selection (replaces the SL3 'syntax'/'selectors' pair).
    defaults = {
        "selector": 'text.html.markdown, text.plain, text.tex.latex, comment'
    }
    tempfile_suffix = '.tmp'
|
Update for SublimeLinter 4 API.
|
Update for SublimeLinter 4 API.
github.com/ckaznocha/SublimeLinter-contrib-write-good/issues/14
github.com/ckaznocha/SublimeLinter-contrib-write-good/issues/11
|
Python
|
mit
|
ckaznocha/SublimeLinter-contrib-write-good
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Clifton Kaznocha
# Copyright (c) 2014 Clifton Kaznocha
#
# License: MIT
#
"""This module exports the WriteGood plugin class."""
import SublimeLinter
from SublimeLinter.lint import NodeLinter
if getattr(SublimeLinter.lint, 'VERSION', 3) > 3:
from SublimeLinter.lint import const
WARNING = const.WARNING
else:
from SublimeLinter.lint import highlight
WARNING = highlight.WARNING
class WriteGood(NodeLinter):
"""Provides an interface to write-good."""
syntax = ('*')
cmd = ('write-good')
npm_name = 'write-good'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = ">=0.9.0"
regex = r'''(?xi)
^(?P<message>(?P<near>"[^"]*").*)\son\sline\s(?P<line>\d+)\sat\scolumn\s\d+$
'''
multiline = True
default_type = WARNING
selectors = {
'*': 'text.html.markdown, text.plain, text.tex.latex, comment'
}
tempfile_suffix = '.tmp'
Update for SublimeLinter 4 API.
github.com/ckaznocha/SublimeLinter-contrib-write-good/issues/14
github.com/ckaznocha/SublimeLinter-contrib-write-good/issues/11
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Clifton Kaznocha
# Copyright (c) 2014 Clifton Kaznocha
#
# License: MIT
#
"""This module exports the WriteGood plugin class."""
import SublimeLinter
from SublimeLinter.lint import NodeLinter
if getattr(SublimeLinter.lint, 'VERSION', 3) > 3:
from SublimeLinter.lint import const
WARNING = const.WARNING
else:
from SublimeLinter.lint import highlight
WARNING = highlight.WARNING
class WriteGood(NodeLinter):
"""Provides an interface to write-good."""
cmd = ('write-good')
npm_name = 'write-good'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = ">=0.9.0"
regex = r'''(?xi)
^(?P<message>(?P<near>"[^"]*").*)\son\sline\s(?P<line>\d+)\sat\scolumn\s\d+$
'''
multiline = True
default_type = WARNING
defaults = {
"selector": 'text.html.markdown, text.plain, text.tex.latex, comment'
}
tempfile_suffix = '.tmp'
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Clifton Kaznocha
# Copyright (c) 2014 Clifton Kaznocha
#
# License: MIT
#
"""This module exports the WriteGood plugin class."""
import SublimeLinter
from SublimeLinter.lint import NodeLinter
if getattr(SublimeLinter.lint, 'VERSION', 3) > 3:
from SublimeLinter.lint import const
WARNING = const.WARNING
else:
from SublimeLinter.lint import highlight
WARNING = highlight.WARNING
class WriteGood(NodeLinter):
"""Provides an interface to write-good."""
syntax = ('*')
cmd = ('write-good')
npm_name = 'write-good'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = ">=0.9.0"
regex = r'''(?xi)
^(?P<message>(?P<near>"[^"]*").*)\son\sline\s(?P<line>\d+)\sat\scolumn\s\d+$
'''
multiline = True
default_type = WARNING
selectors = {
'*': 'text.html.markdown, text.plain, text.tex.latex, comment'
}
tempfile_suffix = '.tmp'
<commit_msg>Update for SublimeLinter 4 API.
github.com/ckaznocha/SublimeLinter-contrib-write-good/issues/14
github.com/ckaznocha/SublimeLinter-contrib-write-good/issues/11<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Clifton Kaznocha
# Copyright (c) 2014 Clifton Kaznocha
#
# License: MIT
#
"""This module exports the WriteGood plugin class."""
import SublimeLinter
from SublimeLinter.lint import NodeLinter
if getattr(SublimeLinter.lint, 'VERSION', 3) > 3:
from SublimeLinter.lint import const
WARNING = const.WARNING
else:
from SublimeLinter.lint import highlight
WARNING = highlight.WARNING
class WriteGood(NodeLinter):
"""Provides an interface to write-good."""
cmd = ('write-good')
npm_name = 'write-good'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = ">=0.9.0"
regex = r'''(?xi)
^(?P<message>(?P<near>"[^"]*").*)\son\sline\s(?P<line>\d+)\sat\scolumn\s\d+$
'''
multiline = True
default_type = WARNING
defaults = {
"selector": 'text.html.markdown, text.plain, text.tex.latex, comment'
}
tempfile_suffix = '.tmp'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Clifton Kaznocha
# Copyright (c) 2014 Clifton Kaznocha
#
# License: MIT
#
"""This module exports the WriteGood plugin class."""
import SublimeLinter
from SublimeLinter.lint import NodeLinter
if getattr(SublimeLinter.lint, 'VERSION', 3) > 3:
from SublimeLinter.lint import const
WARNING = const.WARNING
else:
from SublimeLinter.lint import highlight
WARNING = highlight.WARNING
class WriteGood(NodeLinter):
"""Provides an interface to write-good."""
syntax = ('*')
cmd = ('write-good')
npm_name = 'write-good'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = ">=0.9.0"
regex = r'''(?xi)
^(?P<message>(?P<near>"[^"]*").*)\son\sline\s(?P<line>\d+)\sat\scolumn\s\d+$
'''
multiline = True
default_type = WARNING
selectors = {
'*': 'text.html.markdown, text.plain, text.tex.latex, comment'
}
tempfile_suffix = '.tmp'
Update for SublimeLinter 4 API.
github.com/ckaznocha/SublimeLinter-contrib-write-good/issues/14
github.com/ckaznocha/SublimeLinter-contrib-write-good/issues/11#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Clifton Kaznocha
# Copyright (c) 2014 Clifton Kaznocha
#
# License: MIT
#
"""This module exports the WriteGood plugin class."""
import SublimeLinter
from SublimeLinter.lint import NodeLinter
if getattr(SublimeLinter.lint, 'VERSION', 3) > 3:
from SublimeLinter.lint import const
WARNING = const.WARNING
else:
from SublimeLinter.lint import highlight
WARNING = highlight.WARNING
class WriteGood(NodeLinter):
"""Provides an interface to write-good."""
cmd = ('write-good')
npm_name = 'write-good'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = ">=0.9.0"
regex = r'''(?xi)
^(?P<message>(?P<near>"[^"]*").*)\son\sline\s(?P<line>\d+)\sat\scolumn\s\d+$
'''
multiline = True
default_type = WARNING
defaults = {
"selector": 'text.html.markdown, text.plain, text.tex.latex, comment'
}
tempfile_suffix = '.tmp'
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Clifton Kaznocha
# Copyright (c) 2014 Clifton Kaznocha
#
# License: MIT
#
"""This module exports the WriteGood plugin class."""
import SublimeLinter
from SublimeLinter.lint import NodeLinter
if getattr(SublimeLinter.lint, 'VERSION', 3) > 3:
from SublimeLinter.lint import const
WARNING = const.WARNING
else:
from SublimeLinter.lint import highlight
WARNING = highlight.WARNING
class WriteGood(NodeLinter):
"""Provides an interface to write-good."""
syntax = ('*')
cmd = ('write-good')
npm_name = 'write-good'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = ">=0.9.0"
regex = r'''(?xi)
^(?P<message>(?P<near>"[^"]*").*)\son\sline\s(?P<line>\d+)\sat\scolumn\s\d+$
'''
multiline = True
default_type = WARNING
selectors = {
'*': 'text.html.markdown, text.plain, text.tex.latex, comment'
}
tempfile_suffix = '.tmp'
<commit_msg>Update for SublimeLinter 4 API.
github.com/ckaznocha/SublimeLinter-contrib-write-good/issues/14
github.com/ckaznocha/SublimeLinter-contrib-write-good/issues/11<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Clifton Kaznocha
# Copyright (c) 2014 Clifton Kaznocha
#
# License: MIT
#
"""This module exports the WriteGood plugin class."""
import SublimeLinter
from SublimeLinter.lint import NodeLinter
if getattr(SublimeLinter.lint, 'VERSION', 3) > 3:
from SublimeLinter.lint import const
WARNING = const.WARNING
else:
from SublimeLinter.lint import highlight
WARNING = highlight.WARNING
class WriteGood(NodeLinter):
"""Provides an interface to write-good."""
cmd = ('write-good')
npm_name = 'write-good'
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = ">=0.9.0"
regex = r'''(?xi)
^(?P<message>(?P<near>"[^"]*").*)\son\sline\s(?P<line>\d+)\sat\scolumn\s\d+$
'''
multiline = True
default_type = WARNING
defaults = {
"selector": 'text.html.markdown, text.plain, text.tex.latex, comment'
}
tempfile_suffix = '.tmp'
|
e7bfa4bc9bc8c1caf7ef5f4618943543bed99f0a
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Cherng
# Copyright (c) 2015 jfcherng
#
# License: MIT
#
from SublimeLinter.lint import Linter, util
import platform
class Iverilog (Linter):
    """Provides an interface to Icarus Verilog (iverilog)."""
    syntax = ('verilog')
    cmd = 'iverilog -t null'  # '-t null' target: elaborate only, emit no output file
    tempfile_suffix = 'verilog'
    # We are missing out on some errors by ignoring multiline messages.
    # Windows builds of iverilog format diagnostics differently, hence two patterns.
    if platform.system() == 'Windows':
        regex = (
            r'^([^:]+):.*:(?P<line>\d*):'
            r'.((?P<error>error)|(?P<warning>warning))?'
            r'(?P<message>.*)'
        )
    else:
        regex = (
            r'^([^:]+):(?P<line>\d+): '
            r'(?:(?P<error>error)|(?P<warning>warning): )?'
            r'(?P<message>.+)'
        )
    error_stream = util.STREAM_BOTH  # iverilog writes to both stdout and stderr
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Cherng
# Copyright (c) 2015 jfcherng
#
# License: MIT
#
import sublime, sublime_plugin
from SublimeLinter.lint import Linter, util
class Iverilog (Linter):
    """Provides an interface to Icarus Verilog (iverilog)."""
    syntax = ('verilog')
    cmd = 'iverilog -t null'  # '-t null' target: elaborate only, emit no output file
    tempfile_suffix = 'verilog'
    # We are missing out on some errors by ignoring multiline messages.
    # Uses the Sublime Text API (sublime.platform(), lowercase names) rather
    # than the stdlib platform module. Windows iverilog formats differently.
    if sublime.platform() == 'windows':
        regex = (
            r'^([^:]+):.*:(?P<line>\d*):'
            r'.((?P<error>error)|(?P<warning>warning))?'
            r'(?P<message>.*)'
        )
    else:
        regex = (
            r'^([^:]+):(?P<line>\d+): '
            r'(?:(?P<error>error)|(?P<warning>warning): )?'
            r'(?P<message>.+)'
        )
    error_stream = util.STREAM_BOTH  # iverilog writes to both stdout and stderr
|
Use the platform() in sublime.py rather than importing platform.
|
Use the platform() in sublime.py rather than importing platform.
|
Python
|
mit
|
jfcherng/SublimeLinter-contrib-iverilog,jfcherng/SublimeLinter-contrib-iverilog
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Cherng
# Copyright (c) 2015 jfcherng
#
# License: MIT
#
from SublimeLinter.lint import Linter, util
import platform
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if platform.system() == 'Windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
Use the platform() in sublime.py rather than importing platform.
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Cherng
# Copyright (c) 2015 jfcherng
#
# License: MIT
#
import sublime, sublime_plugin
from SublimeLinter.lint import Linter, util
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if sublime.platform() == 'windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Cherng
# Copyright (c) 2015 jfcherng
#
# License: MIT
#
from SublimeLinter.lint import Linter, util
import platform
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if platform.system() == 'Windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
<commit_msg>Use the platform() in sublime.py rather than importing platform.<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Cherng
# Copyright (c) 2015 jfcherng
#
# License: MIT
#
import sublime, sublime_plugin
from SublimeLinter.lint import Linter, util
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if sublime.platform() == 'windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Cherng
# Copyright (c) 2015 jfcherng
#
# License: MIT
#
from SublimeLinter.lint import Linter, util
import platform
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if platform.system() == 'Windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
Use the platform() in sublime.py rather than importing platform.#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Cherng
# Copyright (c) 2015 jfcherng
#
# License: MIT
#
import sublime, sublime_plugin
from SublimeLinter.lint import Linter, util
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if sublime.platform() == 'windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Cherng
# Copyright (c) 2015 jfcherng
#
# License: MIT
#
from SublimeLinter.lint import Linter, util
import platform
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if platform.system() == 'Windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
<commit_msg>Use the platform() in sublime.py rather than importing platform.<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Cherng
# Copyright (c) 2015 jfcherng
#
# License: MIT
#
import sublime, sublime_plugin
from SublimeLinter.lint import Linter, util
class Iverilog (Linter):
syntax = ('verilog')
cmd = 'iverilog -t null'
tempfile_suffix = 'verilog'
# We are missing out on some errors by ignoring multiline messages.
if sublime.platform() == 'windows':
regex = (
r'^([^:]+):.*:(?P<line>\d*):'
r'.((?P<error>error)|(?P<warning>warning))?'
r'(?P<message>.*)'
)
else:
regex = (
r'^([^:]+):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning): )?'
r'(?P<message>.+)'
)
error_stream = util.STREAM_BOTH
|
87438b9dcdbd397d754b4317bd7724e5b663f5b1
|
dedupsqlfs/lib/cache/simple.py
|
dedupsqlfs/lib/cache/simple.py
|
# -*- coding: utf8 -*-
from time import time
__author__ = 'sergey'
class CacheTTLseconds(object):
    """
    Simple TTL cache storage.

    Layout::

        {
            key (int | str): [
                timestamp (float),  # when added / last read; used by clear()
                value (int | str),  # cached data
            ],
            ...
        }
    """

    # Offsets into the per-key [timestamp, value] pair.
    OFFSET_TIME = 0
    OFFSET_VALUE = 1

    _max_ttl = 300   # seconds an entry may sit untouched before clear() drops it
    _storage = None

    def __init__(self):
        self._storage = {}

    def __len__(self):
        return len(self._storage)

    def set_max_ttl(self, seconds):
        """Set the entry lifetime in seconds; returns self for chaining."""
        self._max_ttl = seconds
        return self

    def set(self, key, value):
        """Store *value* under *key* with the current timestamp; returns self."""
        self._storage[ key ] = [time(), value]
        return self

    def get(self, key, default=None):
        """Return the cached value for *key*, or *default* if absent."""
        # BUG FIX: return only the stored value, not the whole
        # [timestamp, value] pair the cache keeps internally.
        val = self._storage.get(key, [0, default])[self.OFFSET_VALUE]
        now = time()
        # Touch the entry so frequently-read keys are not expired by clear().
        if key in self._storage:
            self._storage[ key ][self.OFFSET_TIME] = now
        return val

    def unset(self, key):
        """Remove *key* if present; returns self for chaining."""
        if key in self._storage:
            del self._storage[ key ]
        return self

    def clear(self):
        """Evict entries idle longer than max_ttl; return the number removed."""
        now = time()
        count = 0
        # Iterate over a snapshot: deleting while iterating a live dict view
        # would raise RuntimeError.
        for key, item in tuple(self._storage.items()):
            if now - item[self.OFFSET_TIME] > self._max_ttl:
                del self._storage[key]
                count += 1
        return count
|
# -*- coding: utf8 -*-
from time import time
__author__ = 'sergey'
class CacheTTLseconds(object):
    """
    Simple TTL cache.

    Each key maps to a two-item list ``[timestamp, value]``; the timestamp
    records when the entry was added or last read, and clear() evicts
    entries whose timestamp is older than the configured TTL.
    """

    # Offsets into the per-key [timestamp, value] pair.
    OFFSET_TIME = 0
    OFFSET_VALUE = 1

    _max_ttl = 300
    _storage = None

    def __init__(self):
        self._storage = {}

    def __len__(self):
        return len(self._storage)

    def set_max_ttl(self, seconds):
        """Change the idle lifetime in seconds; chainable."""
        self._max_ttl = seconds
        return self

    def set(self, key, value):
        """Insert or overwrite *key* with a fresh timestamp; chainable."""
        self._storage[key] = [time(), value]
        return self

    def get(self, key, default=None):
        """Return the value stored under *key* (refreshing its timestamp), or *default*."""
        entry = self._storage.get(key)
        if entry is None:
            return default
        # Refresh the timestamp so hot keys survive clear().
        entry[self.OFFSET_TIME] = time()
        return entry[self.OFFSET_VALUE]

    def unset(self, key):
        """Drop *key* when present; chainable."""
        self._storage.pop(key, None)
        return self

    def clear(self):
        """Evict entries idle longer than the TTL; return how many were dropped."""
        deadline = time() - self._max_ttl
        stale = [k for k, rec in self._storage.items()
                 if rec[self.OFFSET_TIME] < deadline]
        for k in stale:
            del self._storage[k]
        return len(stale)
|
Fix value get - use offset
|
Fix value get - use offset
|
Python
|
mit
|
sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs
|
# -*- coding: utf8 -*-
from time import time
__author__ = 'sergey'
class CacheTTLseconds(object):
"""
Simple cache storage
{
key (int | str) : [
timestamp (float), - then added, updated, set to 0 if expired
values (int | str) - some data
], ...
}
"""
OFFSET_TIME = 0
OFFSET_VALUE = 1
_max_ttl = 300
_storage = None
def __init__(self):
self._storage = {}
pass
def __len__(self):
return len(self._storage)
def set_max_ttl(self, seconds):
self._max_ttl = seconds
return self
def set(self, key, value):
self._storage[ key ] = [time(), value]
return self
def get(self, key, default=None):
# not setted
val = self._storage.get(key, [0, default])
now = time()
# update time only if value was set
if key in self._storage:
self._storage[ key ][self.OFFSET_TIME] = now
return val
def unset(self, key):
if key in self._storage:
del self._storage[ key ]
return self
def clear(self):
now = time()
count = 0
for key, item in tuple(self._storage.items()):
if now - item[self.OFFSET_TIME] > self._max_ttl:
del self._storage[key]
count += 1
return count
Fix value get - use offset
|
# -*- coding: utf8 -*-
from time import time
__author__ = 'sergey'
class CacheTTLseconds(object):
"""
Simple cache storage
{
key (int | str) : [
timestamp (float), - then added, updated, set to 0 if expired
values (int | str) - some data
], ...
}
"""
OFFSET_TIME = 0
OFFSET_VALUE = 1
_max_ttl = 300
_storage = None
def __init__(self):
self._storage = {}
pass
def __len__(self):
return len(self._storage)
def set_max_ttl(self, seconds):
self._max_ttl = seconds
return self
def set(self, key, value):
self._storage[ key ] = [time(), value]
return self
def get(self, key, default=None):
# not setted
val = self._storage.get(key, [0, default])[self.OFFSET_VALUE]
now = time()
# update time only if value was set
if key in self._storage:
self._storage[ key ][self.OFFSET_TIME] = now
return val
def unset(self, key):
if key in self._storage:
del self._storage[ key ]
return self
def clear(self):
now = time()
count = 0
for key, item in tuple(self._storage.items()):
if now - item[self.OFFSET_TIME] > self._max_ttl:
del self._storage[key]
count += 1
return count
|
<commit_before># -*- coding: utf8 -*-
from time import time
__author__ = 'sergey'
class CacheTTLseconds(object):
"""
Simple cache storage
{
key (int | str) : [
timestamp (float), - then added, updated, set to 0 if expired
values (int | str) - some data
], ...
}
"""
OFFSET_TIME = 0
OFFSET_VALUE = 1
_max_ttl = 300
_storage = None
def __init__(self):
self._storage = {}
pass
def __len__(self):
return len(self._storage)
def set_max_ttl(self, seconds):
self._max_ttl = seconds
return self
def set(self, key, value):
self._storage[ key ] = [time(), value]
return self
def get(self, key, default=None):
# not setted
val = self._storage.get(key, [0, default])
now = time()
# update time only if value was set
if key in self._storage:
self._storage[ key ][self.OFFSET_TIME] = now
return val
def unset(self, key):
if key in self._storage:
del self._storage[ key ]
return self
def clear(self):
now = time()
count = 0
for key, item in tuple(self._storage.items()):
if now - item[self.OFFSET_TIME] > self._max_ttl:
del self._storage[key]
count += 1
return count
<commit_msg>Fix value get - use offset<commit_after>
|
# -*- coding: utf8 -*-
from time import time
__author__ = 'sergey'
class CacheTTLseconds(object):
"""
Simple cache storage
{
key (int | str) : [
timestamp (float), - then added, updated, set to 0 if expired
values (int | str) - some data
], ...
}
"""
OFFSET_TIME = 0
OFFSET_VALUE = 1
_max_ttl = 300
_storage = None
def __init__(self):
self._storage = {}
pass
def __len__(self):
return len(self._storage)
def set_max_ttl(self, seconds):
self._max_ttl = seconds
return self
def set(self, key, value):
self._storage[ key ] = [time(), value]
return self
def get(self, key, default=None):
# not setted
val = self._storage.get(key, [0, default])[self.OFFSET_VALUE]
now = time()
# update time only if value was set
if key in self._storage:
self._storage[ key ][self.OFFSET_TIME] = now
return val
def unset(self, key):
if key in self._storage:
del self._storage[ key ]
return self
def clear(self):
now = time()
count = 0
for key, item in tuple(self._storage.items()):
if now - item[self.OFFSET_TIME] > self._max_ttl:
del self._storage[key]
count += 1
return count
|
# -*- coding: utf8 -*-
from time import time
__author__ = 'sergey'
class CacheTTLseconds(object):
"""
Simple cache storage
{
key (int | str) : [
timestamp (float), - then added, updated, set to 0 if expired
values (int | str) - some data
], ...
}
"""
OFFSET_TIME = 0
OFFSET_VALUE = 1
_max_ttl = 300
_storage = None
def __init__(self):
self._storage = {}
pass
def __len__(self):
return len(self._storage)
def set_max_ttl(self, seconds):
self._max_ttl = seconds
return self
def set(self, key, value):
self._storage[ key ] = [time(), value]
return self
def get(self, key, default=None):
# not setted
val = self._storage.get(key, [0, default])
now = time()
# update time only if value was set
if key in self._storage:
self._storage[ key ][self.OFFSET_TIME] = now
return val
def unset(self, key):
if key in self._storage:
del self._storage[ key ]
return self
def clear(self):
now = time()
count = 0
for key, item in tuple(self._storage.items()):
if now - item[self.OFFSET_TIME] > self._max_ttl:
del self._storage[key]
count += 1
return count
Fix value get - use offset# -*- coding: utf8 -*-
from time import time
__author__ = 'sergey'
class CacheTTLseconds(object):
"""
Simple cache storage
{
key (int | str) : [
timestamp (float), - then added, updated, set to 0 if expired
values (int | str) - some data
], ...
}
"""
OFFSET_TIME = 0
OFFSET_VALUE = 1
_max_ttl = 300
_storage = None
def __init__(self):
self._storage = {}
pass
def __len__(self):
return len(self._storage)
def set_max_ttl(self, seconds):
self._max_ttl = seconds
return self
def set(self, key, value):
self._storage[ key ] = [time(), value]
return self
def get(self, key, default=None):
# not setted
val = self._storage.get(key, [0, default])[self.OFFSET_VALUE]
now = time()
# update time only if value was set
if key in self._storage:
self._storage[ key ][self.OFFSET_TIME] = now
return val
def unset(self, key):
if key in self._storage:
del self._storage[ key ]
return self
def clear(self):
now = time()
count = 0
for key, item in tuple(self._storage.items()):
if now - item[self.OFFSET_TIME] > self._max_ttl:
del self._storage[key]
count += 1
return count
|
<commit_before># -*- coding: utf8 -*-
from time import time
__author__ = 'sergey'
class CacheTTLseconds(object):
"""
Simple cache storage
{
key (int | str) : [
timestamp (float), - then added, updated, set to 0 if expired
values (int | str) - some data
], ...
}
"""
OFFSET_TIME = 0
OFFSET_VALUE = 1
_max_ttl = 300
_storage = None
def __init__(self):
self._storage = {}
pass
def __len__(self):
return len(self._storage)
def set_max_ttl(self, seconds):
self._max_ttl = seconds
return self
def set(self, key, value):
self._storage[ key ] = [time(), value]
return self
def get(self, key, default=None):
# not setted
val = self._storage.get(key, [0, default])
now = time()
# update time only if value was set
if key in self._storage:
self._storage[ key ][self.OFFSET_TIME] = now
return val
def unset(self, key):
if key in self._storage:
del self._storage[ key ]
return self
def clear(self):
now = time()
count = 0
for key, item in tuple(self._storage.items()):
if now - item[self.OFFSET_TIME] > self._max_ttl:
del self._storage[key]
count += 1
return count
<commit_msg>Fix value get - use offset<commit_after># -*- coding: utf8 -*-
from time import time
__author__ = 'sergey'
class CacheTTLseconds(object):
"""
Simple cache storage
{
key (int | str) : [
timestamp (float), - then added, updated, set to 0 if expired
values (int | str) - some data
], ...
}
"""
OFFSET_TIME = 0
OFFSET_VALUE = 1
_max_ttl = 300
_storage = None
def __init__(self):
self._storage = {}
pass
def __len__(self):
return len(self._storage)
def set_max_ttl(self, seconds):
self._max_ttl = seconds
return self
def set(self, key, value):
self._storage[ key ] = [time(), value]
return self
def get(self, key, default=None):
# not setted
val = self._storage.get(key, [0, default])[self.OFFSET_VALUE]
now = time()
# update time only if value was set
if key in self._storage:
self._storage[ key ][self.OFFSET_TIME] = now
return val
def unset(self, key):
if key in self._storage:
del self._storage[ key ]
return self
def clear(self):
now = time()
count = 0
for key, item in tuple(self._storage.items()):
if now - item[self.OFFSET_TIME] > self._max_ttl:
del self._storage[key]
count += 1
return count
|
cb3f208e2727cd1adea20c529ece5bd766f5e43d
|
users/models.py
|
users/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from settings.models import VotingSystem
# Create your models here.
class Admin(models.Model):
user = models.ForeignKey(User)
system = models.ForeignKey(VotingSystem)
def __unicode__(self):
return u'[%s] %s' % (self.system.machine_name, self.user)
class SuperAdmin(models.Model):
user = models.ForeignKey(User)
def __unicode__(self):
return u'%s' % (self.user)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
details = models.TextField()
def __unicode__(self):
return u'[Profile] %s' % (self.user.username)
admin.site.register(Admin)
admin.site.register(SuperAdmin)
admin.site.register(UserProfile)
|
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.contrib import admin
from settings.models import VotingSystem
import json
# Create your models here.
class Admin(models.Model):
user = models.ForeignKey(User)
system = models.ForeignKey(VotingSystem)
def __unicode__(self):
return u'[%s] %s' % (self.system.machine_name, self.user)
class SuperAdmin(models.Model):
user = models.ForeignKey(User)
def __unicode__(self):
return u'%s' % (self.user)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
details = models.TextField()
def __unicode__(self):
return u'[Profile] %s' % (self.user.username)
def clean(self):
# make sure that the details are a valid json object
try:
json.loads(self.details)
except:
raise ValidationError({
'details': ValidationError('Details needs to be a valid JSON object', code='invalid')
})
admin.site.register(Admin)
admin.site.register(SuperAdmin)
admin.site.register(UserProfile)
|
Make sure that user details are valid JSON
|
Make sure that user details are valid JSON
|
Python
|
mit
|
kuboschek/jay,OpenJUB/jay,OpenJUB/jay,OpenJUB/jay,kuboschek/jay,kuboschek/jay
|
from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from settings.models import VotingSystem
# Create your models here.
class Admin(models.Model):
user = models.ForeignKey(User)
system = models.ForeignKey(VotingSystem)
def __unicode__(self):
return u'[%s] %s' % (self.system.machine_name, self.user)
class SuperAdmin(models.Model):
user = models.ForeignKey(User)
def __unicode__(self):
return u'%s' % (self.user)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
details = models.TextField()
def __unicode__(self):
return u'[Profile] %s' % (self.user.username)
admin.site.register(Admin)
admin.site.register(SuperAdmin)
admin.site.register(UserProfile)
Make sure that user details are valid JSON
|
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.contrib import admin
from settings.models import VotingSystem
import json
# Create your models here.
class Admin(models.Model):
user = models.ForeignKey(User)
system = models.ForeignKey(VotingSystem)
def __unicode__(self):
return u'[%s] %s' % (self.system.machine_name, self.user)
class SuperAdmin(models.Model):
user = models.ForeignKey(User)
def __unicode__(self):
return u'%s' % (self.user)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
details = models.TextField()
def __unicode__(self):
return u'[Profile] %s' % (self.user.username)
def clean(self):
# make sure that the details are a valid json object
try:
json.loads(self.details)
except:
raise ValidationError({
'details': ValidationError('Details needs to be a valid JSON object', code='invalid')
})
admin.site.register(Admin)
admin.site.register(SuperAdmin)
admin.site.register(UserProfile)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from settings.models import VotingSystem
# Create your models here.
class Admin(models.Model):
user = models.ForeignKey(User)
system = models.ForeignKey(VotingSystem)
def __unicode__(self):
return u'[%s] %s' % (self.system.machine_name, self.user)
class SuperAdmin(models.Model):
user = models.ForeignKey(User)
def __unicode__(self):
return u'%s' % (self.user)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
details = models.TextField()
def __unicode__(self):
return u'[Profile] %s' % (self.user.username)
admin.site.register(Admin)
admin.site.register(SuperAdmin)
admin.site.register(UserProfile)
<commit_msg>Make sure that user details are valid JSON<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.contrib import admin
from settings.models import VotingSystem
import json
# Create your models here.
class Admin(models.Model):
user = models.ForeignKey(User)
system = models.ForeignKey(VotingSystem)
def __unicode__(self):
return u'[%s] %s' % (self.system.machine_name, self.user)
class SuperAdmin(models.Model):
user = models.ForeignKey(User)
def __unicode__(self):
return u'%s' % (self.user)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
details = models.TextField()
def __unicode__(self):
return u'[Profile] %s' % (self.user.username)
def clean(self):
# make sure that the details are a valid json object
try:
json.loads(self.details)
except:
raise ValidationError({
'details': ValidationError('Details needs to be a valid JSON object', code='invalid')
})
admin.site.register(Admin)
admin.site.register(SuperAdmin)
admin.site.register(UserProfile)
|
from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from settings.models import VotingSystem
# Create your models here.
class Admin(models.Model):
user = models.ForeignKey(User)
system = models.ForeignKey(VotingSystem)
def __unicode__(self):
return u'[%s] %s' % (self.system.machine_name, self.user)
class SuperAdmin(models.Model):
user = models.ForeignKey(User)
def __unicode__(self):
return u'%s' % (self.user)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
details = models.TextField()
def __unicode__(self):
return u'[Profile] %s' % (self.user.username)
admin.site.register(Admin)
admin.site.register(SuperAdmin)
admin.site.register(UserProfile)
Make sure that user details are valid JSONfrom django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.contrib import admin
from settings.models import VotingSystem
import json
# Create your models here.
class Admin(models.Model):
user = models.ForeignKey(User)
system = models.ForeignKey(VotingSystem)
def __unicode__(self):
return u'[%s] %s' % (self.system.machine_name, self.user)
class SuperAdmin(models.Model):
user = models.ForeignKey(User)
def __unicode__(self):
return u'%s' % (self.user)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
details = models.TextField()
def __unicode__(self):
return u'[Profile] %s' % (self.user.username)
def clean(self):
# make sure that the details are a valid json object
try:
json.loads(self.details)
except:
raise ValidationError({
'details': ValidationError('Details needs to be a valid JSON object', code='invalid')
})
admin.site.register(Admin)
admin.site.register(SuperAdmin)
admin.site.register(UserProfile)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from settings.models import VotingSystem
# Create your models here.
class Admin(models.Model):
user = models.ForeignKey(User)
system = models.ForeignKey(VotingSystem)
def __unicode__(self):
return u'[%s] %s' % (self.system.machine_name, self.user)
class SuperAdmin(models.Model):
user = models.ForeignKey(User)
def __unicode__(self):
return u'%s' % (self.user)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
details = models.TextField()
def __unicode__(self):
return u'[Profile] %s' % (self.user.username)
admin.site.register(Admin)
admin.site.register(SuperAdmin)
admin.site.register(UserProfile)
<commit_msg>Make sure that user details are valid JSON<commit_after>from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from django.contrib import admin
from settings.models import VotingSystem
import json
# Create your models here.
class Admin(models.Model):
user = models.ForeignKey(User)
system = models.ForeignKey(VotingSystem)
def __unicode__(self):
return u'[%s] %s' % (self.system.machine_name, self.user)
class SuperAdmin(models.Model):
user = models.ForeignKey(User)
def __unicode__(self):
return u'%s' % (self.user)
class UserProfile(models.Model):
user = models.OneToOneField(User, related_name="profile")
details = models.TextField()
def __unicode__(self):
return u'[Profile] %s' % (self.user.username)
def clean(self):
# make sure that the details are a valid json object
try:
json.loads(self.details)
except:
raise ValidationError({
'details': ValidationError('Details needs to be a valid JSON object', code='invalid')
})
admin.site.register(Admin)
admin.site.register(SuperAdmin)
admin.site.register(UserProfile)
|
9eb09783d1317c77d65239e1d4c5aceb6eb1bb4b
|
pysyte/net/hosts.py
|
pysyte/net/hosts.py
|
"""Simplified hosts for pysyte"""
import getpass
import socket
from dataclasses import dataclass
@dataclass
class Host:
hostname: str
aliases: list
addresses: list
users: list
localhost = Host(
*(list(socket.gethostbyname_ex(socket.gethostname())) + [getpass.getuser()])
)
|
"""Simplified hosts for pysyte"""
import getpass
import socket
from dataclasses import dataclass
from typing import List
@dataclass
class Host:
hostname: str
aliases: List[str]
addresses: List[str]
users: List[str]
def _read_localhost() -> Host:
"""Read host values for localhost"""
host, aliases, addresses = socket.gethostbyname_ex(socket.gethostname())
user = getpass.getuser()
return Host(host, aliases, addresses, [user])
localhost = _read_localhost()
|
Add method to clarify localhost data
|
Add method to clarify localhost data
|
Python
|
mit
|
jalanb/dotsite
|
"""Simplified hosts for pysyte"""
import getpass
import socket
from dataclasses import dataclass
@dataclass
class Host:
hostname: str
aliases: list
addresses: list
users: list
localhost = Host(
*(list(socket.gethostbyname_ex(socket.gethostname())) + [getpass.getuser()])
)
Add method to clarify localhost data
|
"""Simplified hosts for pysyte"""
import getpass
import socket
from dataclasses import dataclass
from typing import List
@dataclass
class Host:
hostname: str
aliases: List[str]
addresses: List[str]
users: List[str]
def _read_localhost() -> Host:
"""Read host values for localhost"""
host, aliases, addresses = socket.gethostbyname_ex(socket.gethostname())
user = getpass.getuser()
return Host(host, aliases, addresses, [user])
localhost = _read_localhost()
|
<commit_before>"""Simplified hosts for pysyte"""
import getpass
import socket
from dataclasses import dataclass
@dataclass
class Host:
hostname: str
aliases: list
addresses: list
users: list
localhost = Host(
*(list(socket.gethostbyname_ex(socket.gethostname())) + [getpass.getuser()])
)
<commit_msg>Add method to clarify localhost data<commit_after>
|
"""Simplified hosts for pysyte"""
import getpass
import socket
from dataclasses import dataclass
from typing import List
@dataclass
class Host:
hostname: str
aliases: List[str]
addresses: List[str]
users: List[str]
def _read_localhost() -> Host:
"""Read host values for localhost"""
host, aliases, addresses = socket.gethostbyname_ex(socket.gethostname())
user = getpass.getuser()
return Host(host, aliases, addresses, [user])
localhost = _read_localhost()
|
"""Simplified hosts for pysyte"""
import getpass
import socket
from dataclasses import dataclass
@dataclass
class Host:
hostname: str
aliases: list
addresses: list
users: list
localhost = Host(
*(list(socket.gethostbyname_ex(socket.gethostname())) + [getpass.getuser()])
)
Add method to clarify localhost data"""Simplified hosts for pysyte"""
import getpass
import socket
from dataclasses import dataclass
from typing import List
@dataclass
class Host:
hostname: str
aliases: List[str]
addresses: List[str]
users: List[str]
def _read_localhost() -> Host:
"""Read host values for localhost"""
host, aliases, addresses = socket.gethostbyname_ex(socket.gethostname())
user = getpass.getuser()
return Host(host, aliases, addresses, [user])
localhost = _read_localhost()
|
<commit_before>"""Simplified hosts for pysyte"""
import getpass
import socket
from dataclasses import dataclass
@dataclass
class Host:
hostname: str
aliases: list
addresses: list
users: list
localhost = Host(
*(list(socket.gethostbyname_ex(socket.gethostname())) + [getpass.getuser()])
)
<commit_msg>Add method to clarify localhost data<commit_after>"""Simplified hosts for pysyte"""
import getpass
import socket
from dataclasses import dataclass
from typing import List
@dataclass
class Host:
hostname: str
aliases: List[str]
addresses: List[str]
users: List[str]
def _read_localhost() -> Host:
"""Read host values for localhost"""
host, aliases, addresses = socket.gethostbyname_ex(socket.gethostname())
user = getpass.getuser()
return Host(host, aliases, addresses, [user])
localhost = _read_localhost()
|
85807f8de56eb5f8db805ca80f8c11c270cee100
|
config/settings_production.py
|
config/settings_production.py
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('/etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
|
Fix path for secret key to be relative
|
Fix path for secret key to be relative
|
Python
|
agpl-3.0
|
mmilaprat/policycompass-services,policycompass/policycompass-services,policycompass/policycompass-services,policycompass/policycompass-services,mmilaprat/policycompass-services,mmilaprat/policycompass-services
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('/etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
Fix path for secret key to be relative
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
|
<commit_before>"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('/etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
<commit_msg>Fix path for secret key to be relative<commit_after>
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
|
"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('/etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
Fix path for secret key to be relative"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
|
<commit_before>"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('/etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
<commit_msg>Fix path for secret key to be relative<commit_after>"""
Django settings for pc_datamanger project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os
from .settings_basic import *
from .settings import *
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = [
"services-stage.policycompass.eu",
"services-prod.policycompass.eu",
"localhost"
]
with open('etc/policycompass/secret_key') as f:
SECRET_KEY = f.read().strip()
|
95e9fd8e8bc568b4446b708248dedfc0a739e85c
|
inthe_am/taskmanager/features/environment.py
|
inthe_am/taskmanager/features/environment.py
|
import os
from urlparse import urljoin
from django.conf import settings
from splinter.browser import Browser
from inthe_am.taskmanager import models
TEST_COUNTERS = {}
def before_all(context):
context.browser = Browser('phantomjs')
def after_all(context):
context.browser.quit()
context.browser = None
def before_scenario(context, step):
models.User.objects.filter(
email=settings.TESTING_LOGIN_USER
).delete()
def after_scenario(context, step):
context.browser.visit(urljoin(context.config.server_url, '/logout/'))
def after_step(context, step):
global TEST_COUNTERS
if context.failed:
name = '-'.join([
context.scenario.name.replace(' ', '_'),
])
if name not in TEST_COUNTERS:
TEST_COUNTERS[name] = 0
TEST_COUNTERS[name] += 1
name = name + '_%s_' % TEST_COUNTERS[name]
context.browser.screenshot(name)
with open(os.path.join('/tmp', name + '.html'), 'w') as out:
out.write(context.browser.html)
|
import os
from urlparse import urljoin
from django.conf import settings
from splinter.browser import Browser
from inthe_am.taskmanager import models
TEST_COUNTERS = {}
def before_all(context):
context.browser = Browser('phantomjs')
def after_all(context):
context.browser.quit()
context.browser = None
def before_scenario(context, step):
models.User.objects.filter(
email=settings.TESTING_LOGIN_USER
).delete()
context.browser.driver.set_window_size(1024, 768)
def after_scenario(context, step):
context.browser.visit(urljoin(context.config.server_url, '/logout/'))
def after_step(context, step):
global TEST_COUNTERS
if context.failed:
name = '-'.join([
context.scenario.name.replace(' ', '_'),
])
if name not in TEST_COUNTERS:
TEST_COUNTERS[name] = 0
TEST_COUNTERS[name] += 1
name = name + '_%s_' % TEST_COUNTERS[name]
context.browser.screenshot(name)
with open(os.path.join('/tmp', name + '.html'), 'w') as out:
out.write(context.browser.html)
|
Set window size to something a little bigger.
|
Set window size to something a little bigger.
|
Python
|
agpl-3.0
|
coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am
|
import os
from urlparse import urljoin
from django.conf import settings
from splinter.browser import Browser
from inthe_am.taskmanager import models
TEST_COUNTERS = {}
def before_all(context):
context.browser = Browser('phantomjs')
def after_all(context):
context.browser.quit()
context.browser = None
def before_scenario(context, step):
models.User.objects.filter(
email=settings.TESTING_LOGIN_USER
).delete()
def after_scenario(context, step):
context.browser.visit(urljoin(context.config.server_url, '/logout/'))
def after_step(context, step):
global TEST_COUNTERS
if context.failed:
name = '-'.join([
context.scenario.name.replace(' ', '_'),
])
if name not in TEST_COUNTERS:
TEST_COUNTERS[name] = 0
TEST_COUNTERS[name] += 1
name = name + '_%s_' % TEST_COUNTERS[name]
context.browser.screenshot(name)
with open(os.path.join('/tmp', name + '.html'), 'w') as out:
out.write(context.browser.html)
Set window size to something a little bigger.
|
import os
from urlparse import urljoin
from django.conf import settings
from splinter.browser import Browser
from inthe_am.taskmanager import models
TEST_COUNTERS = {}
def before_all(context):
context.browser = Browser('phantomjs')
def after_all(context):
context.browser.quit()
context.browser = None
def before_scenario(context, step):
models.User.objects.filter(
email=settings.TESTING_LOGIN_USER
).delete()
context.browser.driver.set_window_size(1024, 768)
def after_scenario(context, step):
context.browser.visit(urljoin(context.config.server_url, '/logout/'))
def after_step(context, step):
global TEST_COUNTERS
if context.failed:
name = '-'.join([
context.scenario.name.replace(' ', '_'),
])
if name not in TEST_COUNTERS:
TEST_COUNTERS[name] = 0
TEST_COUNTERS[name] += 1
name = name + '_%s_' % TEST_COUNTERS[name]
context.browser.screenshot(name)
with open(os.path.join('/tmp', name + '.html'), 'w') as out:
out.write(context.browser.html)
|
<commit_before>import os
from urlparse import urljoin
from django.conf import settings
from splinter.browser import Browser
from inthe_am.taskmanager import models
TEST_COUNTERS = {}
def before_all(context):
context.browser = Browser('phantomjs')
def after_all(context):
context.browser.quit()
context.browser = None
def before_scenario(context, step):
models.User.objects.filter(
email=settings.TESTING_LOGIN_USER
).delete()
def after_scenario(context, step):
context.browser.visit(urljoin(context.config.server_url, '/logout/'))
def after_step(context, step):
global TEST_COUNTERS
if context.failed:
name = '-'.join([
context.scenario.name.replace(' ', '_'),
])
if name not in TEST_COUNTERS:
TEST_COUNTERS[name] = 0
TEST_COUNTERS[name] += 1
name = name + '_%s_' % TEST_COUNTERS[name]
context.browser.screenshot(name)
with open(os.path.join('/tmp', name + '.html'), 'w') as out:
out.write(context.browser.html)
<commit_msg>Set window size to something a little bigger.<commit_after>
|
import os
from urlparse import urljoin
from django.conf import settings
from splinter.browser import Browser
from inthe_am.taskmanager import models
TEST_COUNTERS = {}
def before_all(context):
context.browser = Browser('phantomjs')
def after_all(context):
context.browser.quit()
context.browser = None
def before_scenario(context, step):
models.User.objects.filter(
email=settings.TESTING_LOGIN_USER
).delete()
context.browser.driver.set_window_size(1024, 768)
def after_scenario(context, step):
context.browser.visit(urljoin(context.config.server_url, '/logout/'))
def after_step(context, step):
global TEST_COUNTERS
if context.failed:
name = '-'.join([
context.scenario.name.replace(' ', '_'),
])
if name not in TEST_COUNTERS:
TEST_COUNTERS[name] = 0
TEST_COUNTERS[name] += 1
name = name + '_%s_' % TEST_COUNTERS[name]
context.browser.screenshot(name)
with open(os.path.join('/tmp', name + '.html'), 'w') as out:
out.write(context.browser.html)
|
import os
from urlparse import urljoin
from django.conf import settings
from splinter.browser import Browser
from inthe_am.taskmanager import models
TEST_COUNTERS = {}
def before_all(context):
context.browser = Browser('phantomjs')
def after_all(context):
context.browser.quit()
context.browser = None
def before_scenario(context, step):
models.User.objects.filter(
email=settings.TESTING_LOGIN_USER
).delete()
def after_scenario(context, step):
context.browser.visit(urljoin(context.config.server_url, '/logout/'))
def after_step(context, step):
global TEST_COUNTERS
if context.failed:
name = '-'.join([
context.scenario.name.replace(' ', '_'),
])
if name not in TEST_COUNTERS:
TEST_COUNTERS[name] = 0
TEST_COUNTERS[name] += 1
name = name + '_%s_' % TEST_COUNTERS[name]
context.browser.screenshot(name)
with open(os.path.join('/tmp', name + '.html'), 'w') as out:
out.write(context.browser.html)
Set window size to something a little bigger.import os
from urlparse import urljoin
from django.conf import settings
from splinter.browser import Browser
from inthe_am.taskmanager import models
TEST_COUNTERS = {}
def before_all(context):
context.browser = Browser('phantomjs')
def after_all(context):
context.browser.quit()
context.browser = None
def before_scenario(context, step):
models.User.objects.filter(
email=settings.TESTING_LOGIN_USER
).delete()
context.browser.driver.set_window_size(1024, 768)
def after_scenario(context, step):
context.browser.visit(urljoin(context.config.server_url, '/logout/'))
def after_step(context, step):
global TEST_COUNTERS
if context.failed:
name = '-'.join([
context.scenario.name.replace(' ', '_'),
])
if name not in TEST_COUNTERS:
TEST_COUNTERS[name] = 0
TEST_COUNTERS[name] += 1
name = name + '_%s_' % TEST_COUNTERS[name]
context.browser.screenshot(name)
with open(os.path.join('/tmp', name + '.html'), 'w') as out:
out.write(context.browser.html)
|
<commit_before>import os
from urlparse import urljoin
from django.conf import settings
from splinter.browser import Browser
from inthe_am.taskmanager import models
TEST_COUNTERS = {}
def before_all(context):
context.browser = Browser('phantomjs')
def after_all(context):
context.browser.quit()
context.browser = None
def before_scenario(context, step):
models.User.objects.filter(
email=settings.TESTING_LOGIN_USER
).delete()
def after_scenario(context, step):
context.browser.visit(urljoin(context.config.server_url, '/logout/'))
def after_step(context, step):
global TEST_COUNTERS
if context.failed:
name = '-'.join([
context.scenario.name.replace(' ', '_'),
])
if name not in TEST_COUNTERS:
TEST_COUNTERS[name] = 0
TEST_COUNTERS[name] += 1
name = name + '_%s_' % TEST_COUNTERS[name]
context.browser.screenshot(name)
with open(os.path.join('/tmp', name + '.html'), 'w') as out:
out.write(context.browser.html)
<commit_msg>Set window size to something a little bigger.<commit_after>import os
from urlparse import urljoin
from django.conf import settings
from splinter.browser import Browser
from inthe_am.taskmanager import models
TEST_COUNTERS = {}
def before_all(context):
context.browser = Browser('phantomjs')
def after_all(context):
context.browser.quit()
context.browser = None
def before_scenario(context, step):
models.User.objects.filter(
email=settings.TESTING_LOGIN_USER
).delete()
context.browser.driver.set_window_size(1024, 768)
def after_scenario(context, step):
context.browser.visit(urljoin(context.config.server_url, '/logout/'))
def after_step(context, step):
global TEST_COUNTERS
if context.failed:
name = '-'.join([
context.scenario.name.replace(' ', '_'),
])
if name not in TEST_COUNTERS:
TEST_COUNTERS[name] = 0
TEST_COUNTERS[name] += 1
name = name + '_%s_' % TEST_COUNTERS[name]
context.browser.screenshot(name)
with open(os.path.join('/tmp', name + '.html'), 'w') as out:
out.write(context.browser.html)
|
5f799aa16b2bfd35fc68073e73b85cf9ad75ba47
|
tests/__init__.py
|
tests/__init__.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Package set up."""
import oauth2client.util
__author__ = 'afshar@google.com (Ali Afshar)'
def setup_package():
"""Run on testing package."""
oauth2client.util.positional_parameters_enforcement = 'EXCEPTION'
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test package set-up."""
from oauth2client import util
__author__ = 'afshar@google.com (Ali Afshar)'
def setup_package():
"""Run on testing package."""
util.positional_parameters_enforcement = util.POSITIONAL_EXCEPTION
|
Use symbolic constant rather than literal value
|
Use symbolic constant rather than literal value
|
Python
|
apache-2.0
|
google/oauth2client,clancychilds/oauth2client,clancychilds/oauth2client,jonparrott/oauth2client,google/oauth2client,googleapis/oauth2client,googleapis/oauth2client,jonparrott/oauth2client
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Package set up."""
import oauth2client.util
__author__ = 'afshar@google.com (Ali Afshar)'
def setup_package():
"""Run on testing package."""
oauth2client.util.positional_parameters_enforcement = 'EXCEPTION'
Use symbolic constant rather than literal value
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test package set-up."""
from oauth2client import util
__author__ = 'afshar@google.com (Ali Afshar)'
def setup_package():
"""Run on testing package."""
util.positional_parameters_enforcement = util.POSITIONAL_EXCEPTION
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Package set up."""
import oauth2client.util
__author__ = 'afshar@google.com (Ali Afshar)'
def setup_package():
"""Run on testing package."""
oauth2client.util.positional_parameters_enforcement = 'EXCEPTION'
<commit_msg>Use symbolic constant rather than literal value<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test package set-up."""
from oauth2client import util
__author__ = 'afshar@google.com (Ali Afshar)'
def setup_package():
"""Run on testing package."""
util.positional_parameters_enforcement = util.POSITIONAL_EXCEPTION
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Package set up."""
import oauth2client.util
__author__ = 'afshar@google.com (Ali Afshar)'
def setup_package():
"""Run on testing package."""
oauth2client.util.positional_parameters_enforcement = 'EXCEPTION'
Use symbolic constant rather than literal value# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test package set-up."""
from oauth2client import util
__author__ = 'afshar@google.com (Ali Afshar)'
def setup_package():
"""Run on testing package."""
util.positional_parameters_enforcement = util.POSITIONAL_EXCEPTION
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test Package set up."""
import oauth2client.util
__author__ = 'afshar@google.com (Ali Afshar)'
def setup_package():
"""Run on testing package."""
oauth2client.util.positional_parameters_enforcement = 'EXCEPTION'
<commit_msg>Use symbolic constant rather than literal value<commit_after># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test package set-up."""
from oauth2client import util
__author__ = 'afshar@google.com (Ali Afshar)'
def setup_package():
"""Run on testing package."""
util.positional_parameters_enforcement = util.POSITIONAL_EXCEPTION
|
6be8907881870fd6dd5e5629b7cbc9fe491c35dd
|
Class_Pattern.py
|
Class_Pattern.py
|
class Pattern(object):
def __init__(self, pattern):
self.consonants = ['b','c','d','f','g','h','j','k','l','m','n','p','q','r','s','t','v','w','x','y','z']
self.vowels = ['a','e','i','o','u']
self.pattern = pattern
for letter in self.pattern:
if letter != 'C' and letter != 'V':
raise TypeError("Error: pattern is incorrectly formatted\nHere is and example pattern 'CVCVC'")
x = Pattern('CVCV')
|
import random
class Pattern(object):
def __init__(self, pattern):
self.consonants = ['b','c','d','f','g','h','j','k','l','m','n','p','q','r','s','t','v','w','x','y','z']
self.vowels = ['a','e','i','o','u']
self.pattern_dict = {'C': self.consonants, 'V': self.vowels}
self.pattern = pattern
def create_word(self):
string_list = []
for letter in self.pattern:
if letter not in self.pattern_dict:
raise TypeError("Error: pattern is incorrectly formatted")
string_list.append(random.choice(self.pattern_dict[letter]))
return "".join(string_list)
x = Pattern('CVCV')
print(x.create_word())
print(x.pattern_dict)
|
Add create_word function in pattern class
|
Add create_word function in pattern class
|
Python
|
mit
|
achyutreddy24/WordGen
|
class Pattern(object):
def __init__(self, pattern):
self.consonants = ['b','c','d','f','g','h','j','k','l','m','n','p','q','r','s','t','v','w','x','y','z']
self.vowels = ['a','e','i','o','u']
self.pattern = pattern
for letter in self.pattern:
if letter != 'C' and letter != 'V':
raise TypeError("Error: pattern is incorrectly formatted\nHere is and example pattern 'CVCVC'")
x = Pattern('CVCV')Add create_word function in pattern class
|
import random
class Pattern(object):
def __init__(self, pattern):
self.consonants = ['b','c','d','f','g','h','j','k','l','m','n','p','q','r','s','t','v','w','x','y','z']
self.vowels = ['a','e','i','o','u']
self.pattern_dict = {'C': self.consonants, 'V': self.vowels}
self.pattern = pattern
def create_word(self):
string_list = []
for letter in self.pattern:
if letter not in self.pattern_dict:
raise TypeError("Error: pattern is incorrectly formatted")
string_list.append(random.choice(self.pattern_dict[letter]))
return "".join(string_list)
x = Pattern('CVCV')
print(x.create_word())
print(x.pattern_dict)
|
<commit_before>class Pattern(object):
def __init__(self, pattern):
self.consonants = ['b','c','d','f','g','h','j','k','l','m','n','p','q','r','s','t','v','w','x','y','z']
self.vowels = ['a','e','i','o','u']
self.pattern = pattern
for letter in self.pattern:
if letter != 'C' and letter != 'V':
raise TypeError("Error: pattern is incorrectly formatted\nHere is and example pattern 'CVCVC'")
x = Pattern('CVCV')<commit_msg>Add create_word function in pattern class<commit_after>
|
import random
class Pattern(object):
def __init__(self, pattern):
self.consonants = ['b','c','d','f','g','h','j','k','l','m','n','p','q','r','s','t','v','w','x','y','z']
self.vowels = ['a','e','i','o','u']
self.pattern_dict = {'C': self.consonants, 'V': self.vowels}
self.pattern = pattern
def create_word(self):
string_list = []
for letter in self.pattern:
if letter not in self.pattern_dict:
raise TypeError("Error: pattern is incorrectly formatted")
string_list.append(random.choice(self.pattern_dict[letter]))
return "".join(string_list)
x = Pattern('CVCV')
print(x.create_word())
print(x.pattern_dict)
|
class Pattern(object):
def __init__(self, pattern):
self.consonants = ['b','c','d','f','g','h','j','k','l','m','n','p','q','r','s','t','v','w','x','y','z']
self.vowels = ['a','e','i','o','u']
self.pattern = pattern
for letter in self.pattern:
if letter != 'C' and letter != 'V':
raise TypeError("Error: pattern is incorrectly formatted\nHere is and example pattern 'CVCVC'")
x = Pattern('CVCV')Add create_word function in pattern classimport random
class Pattern(object):
def __init__(self, pattern):
self.consonants = ['b','c','d','f','g','h','j','k','l','m','n','p','q','r','s','t','v','w','x','y','z']
self.vowels = ['a','e','i','o','u']
self.pattern_dict = {'C': self.consonants, 'V': self.vowels}
self.pattern = pattern
def create_word(self):
string_list = []
for letter in self.pattern:
if letter not in self.pattern_dict:
raise TypeError("Error: pattern is incorrectly formatted")
string_list.append(random.choice(self.pattern_dict[letter]))
return "".join(string_list)
x = Pattern('CVCV')
print(x.create_word())
print(x.pattern_dict)
|
<commit_before>class Pattern(object):
def __init__(self, pattern):
self.consonants = ['b','c','d','f','g','h','j','k','l','m','n','p','q','r','s','t','v','w','x','y','z']
self.vowels = ['a','e','i','o','u']
self.pattern = pattern
for letter in self.pattern:
if letter != 'C' and letter != 'V':
raise TypeError("Error: pattern is incorrectly formatted\nHere is and example pattern 'CVCVC'")
x = Pattern('CVCV')<commit_msg>Add create_word function in pattern class<commit_after>import random
class Pattern(object):
def __init__(self, pattern):
self.consonants = ['b','c','d','f','g','h','j','k','l','m','n','p','q','r','s','t','v','w','x','y','z']
self.vowels = ['a','e','i','o','u']
self.pattern_dict = {'C': self.consonants, 'V': self.vowels}
self.pattern = pattern
def create_word(self):
string_list = []
for letter in self.pattern:
if letter not in self.pattern_dict:
raise TypeError("Error: pattern is incorrectly formatted")
string_list.append(random.choice(self.pattern_dict[letter]))
return "".join(string_list)
x = Pattern('CVCV')
print(x.create_word())
print(x.pattern_dict)
|
ad262d62269edf0c297c33da39ca605713d5cf74
|
rnacentral/nhmmer/settings.py
|
rnacentral/nhmmer/settings.py
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# minimum query sequence length
MIN_LENGTH = 11
# maximum query sequence length
MAX_LENGTH = 1000
# Redis results expiration time
EXPIRATION = 60*60*24*7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 60*60 # seconds
# full path to query files
QUERY_DIR = ''
# full path to results files
RESULTS_DIR = ''
# full path to nhmmer executable
NHMMER_EXECUTABLE = ''
# full path to sequence database
SEQDATABASE = ''
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# minimum query sequence length
MIN_LENGTH = 11
# maximum query sequence length
MAX_LENGTH = 10000
# Redis results expiration time
EXPIRATION = 60*60*24*7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 60*60 # seconds
# full path to query files
QUERY_DIR = ''
# full path to results files
RESULTS_DIR = ''
# full path to nhmmer executable
NHMMER_EXECUTABLE = ''
# full path to sequence database
SEQDATABASE = ''
|
Increase maximum query length to 10K
|
Increase maximum query length to 10K
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# minimum query sequence length
MIN_LENGTH = 11
# maximum query sequence length
MAX_LENGTH = 1000
# Redis results expiration time
EXPIRATION = 60*60*24*7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 60*60 # seconds
# full path to query files
QUERY_DIR = ''
# full path to results files
RESULTS_DIR = ''
# full path to nhmmer executable
NHMMER_EXECUTABLE = ''
# full path to sequence database
SEQDATABASE = ''
Increase maximum query length to 10K
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# minimum query sequence length
MIN_LENGTH = 11
# maximum query sequence length
MAX_LENGTH = 10000
# Redis results expiration time
EXPIRATION = 60*60*24*7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 60*60 # seconds
# full path to query files
QUERY_DIR = ''
# full path to results files
RESULTS_DIR = ''
# full path to nhmmer executable
NHMMER_EXECUTABLE = ''
# full path to sequence database
SEQDATABASE = ''
|
<commit_before>"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# minimum query sequence length
MIN_LENGTH = 11
# maximum query sequence length
MAX_LENGTH = 1000
# Redis results expiration time
EXPIRATION = 60*60*24*7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 60*60 # seconds
# full path to query files
QUERY_DIR = ''
# full path to results files
RESULTS_DIR = ''
# full path to nhmmer executable
NHMMER_EXECUTABLE = ''
# full path to sequence database
SEQDATABASE = ''
<commit_msg>Increase maximum query length to 10K<commit_after>
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# minimum query sequence length
MIN_LENGTH = 11
# maximum query sequence length
MAX_LENGTH = 10000
# Redis results expiration time
EXPIRATION = 60*60*24*7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 60*60 # seconds
# full path to query files
QUERY_DIR = ''
# full path to results files
RESULTS_DIR = ''
# full path to nhmmer executable
NHMMER_EXECUTABLE = ''
# full path to sequence database
SEQDATABASE = ''
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# minimum query sequence length
MIN_LENGTH = 11
# maximum query sequence length
MAX_LENGTH = 1000
# Redis results expiration time
EXPIRATION = 60*60*24*7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 60*60 # seconds
# full path to query files
QUERY_DIR = ''
# full path to results files
RESULTS_DIR = ''
# full path to nhmmer executable
NHMMER_EXECUTABLE = ''
# full path to sequence database
SEQDATABASE = ''
Increase maximum query length to 10K"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# minimum query sequence length
MIN_LENGTH = 11
# maximum query sequence length
MAX_LENGTH = 10000
# Redis results expiration time
EXPIRATION = 60*60*24*7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 60*60 # seconds
# full path to query files
QUERY_DIR = ''
# full path to results files
RESULTS_DIR = ''
# full path to nhmmer executable
NHMMER_EXECUTABLE = ''
# full path to sequence database
SEQDATABASE = ''
|
<commit_before>"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# minimum query sequence length
MIN_LENGTH = 11
# maximum query sequence length
MAX_LENGTH = 1000
# Redis results expiration time
EXPIRATION = 60*60*24*7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 60*60 # seconds
# full path to query files
QUERY_DIR = ''
# full path to results files
RESULTS_DIR = ''
# full path to nhmmer executable
NHMMER_EXECUTABLE = ''
# full path to sequence database
SEQDATABASE = ''
<commit_msg>Increase maximum query length to 10K<commit_after>"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# minimum query sequence length
MIN_LENGTH = 11
# maximum query sequence length
MAX_LENGTH = 10000
# Redis results expiration time
EXPIRATION = 60*60*24*7 # seconds
# maximum time to run nhmmer
MAX_RUN_TIME = 60*60 # seconds
# full path to query files
QUERY_DIR = ''
# full path to results files
RESULTS_DIR = ''
# full path to nhmmer executable
NHMMER_EXECUTABLE = ''
# full path to sequence database
SEQDATABASE = ''
|
0097f33900b6d75df38b28012a1e09fb03e22326
|
driller/tasks.py
|
driller/tasks.py
|
import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, out_dir, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
|
import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
|
Remove out_dir from the drill task's list of arguments
|
Remove out_dir from the drill task's list of arguments
|
Python
|
bsd-2-clause
|
shellphish/driller
|
import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, out_dir, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
Remove out_dir from the drill task's list of arguments
|
import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
|
<commit_before>import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, out_dir, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
<commit_msg>Remove out_dir from the drill task's list of arguments<commit_after>
|
import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
|
import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, out_dir, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
Remove out_dir from the drill task's list of argumentsimport redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
|
<commit_before>import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, out_dir, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
<commit_msg>Remove out_dir from the drill task's list of arguments<commit_after>import redis
from celery import Celery
from .driller import Driller
app = Celery('tasks', broker='amqp://guest@localhost//', backend='redis://localhost')
redis_pool = redis.ConnectionPool(host='localhost', port=6379, db=1)
@app.task
def drill(binary, input, fuzz_bitmap, qemu_dir):
redis_inst = redis.Redis(connection_pool=redis_pool)
driller = Driller(binary, input, fuzz_bitmap, qemu_dir, redis=redis_inst)
return driller.drill()
|
3aaf18dc43ee81bf2669eb597444d713d1577ebe
|
Motor/src/main/python/vehicles.py
|
Motor/src/main/python/vehicles.py
|
from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
def __init__(self, motor_hat=Adafruit_MotorHAT()):
self.motor_hat = motor_hat
self.motors = []
def release(self):
self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
def update_motor(self, index, command, speed):
motor = self.motor_hat.getMotor(index + 1)
motor.run(command + 1)
motor.setSpeed(speed)
print("Set %d motor to %d command with %d speed" % (index + 1, command + 1, speed))
motor_state = {"location": index, "command": command, "speed": speed}
n = len(self.motors)
if index < n:
self.motors[index] = motor_state
elif index == n:
self.motors.append(motor_state)
else:
raise IndexError()
def dict(self):
return {"motors": self.motors}
|
from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
def __init__(self, motor_hat=Adafruit_MotorHAT()):
self.motor_hat = motor_hat
self.motors = []
def release(self):
self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
def update_motor(self, index, command, speed):
motor = self.motor_hat.getMotor(index + 1)
motor.run(command + 1)
motor.setSpeed(speed)
motor_state = {"location": index, "command": command, "speed": speed}
n = len(self.motors)
if index < n:
self.motors[index] = motor_state
elif index == n:
self.motors.append(motor_state)
else:
raise IndexError()
def dict(self):
return {"motors": self.motors}
|
Update motor API with new data model
|
Update motor API with new data model
|
Python
|
mit
|
misalcedo/RapBot,misalcedo/RapBot,misalcedo/RapBot,misalcedo/RapBot
|
from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
def __init__(self, motor_hat=Adafruit_MotorHAT()):
self.motor_hat = motor_hat
self.motors = []
def release(self):
self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
def update_motor(self, index, command, speed):
motor = self.motor_hat.getMotor(index + 1)
motor.run(command + 1)
motor.setSpeed(speed)
print("Set %d motor to %d command with %d speed" % (index + 1, command + 1, speed))
motor_state = {"location": index, "command": command, "speed": speed}
n = len(self.motors)
if index < n:
self.motors[index] = motor_state
elif index == n:
self.motors.append(motor_state)
else:
raise IndexError()
def dict(self):
return {"motors": self.motors}Update motor API with new data model
|
from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
def __init__(self, motor_hat=Adafruit_MotorHAT()):
self.motor_hat = motor_hat
self.motors = []
def release(self):
self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
def update_motor(self, index, command, speed):
motor = self.motor_hat.getMotor(index + 1)
motor.run(command + 1)
motor.setSpeed(speed)
motor_state = {"location": index, "command": command, "speed": speed}
n = len(self.motors)
if index < n:
self.motors[index] = motor_state
elif index == n:
self.motors.append(motor_state)
else:
raise IndexError()
def dict(self):
return {"motors": self.motors}
|
<commit_before>from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
def __init__(self, motor_hat=Adafruit_MotorHAT()):
self.motor_hat = motor_hat
self.motors = []
def release(self):
self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
def update_motor(self, index, command, speed):
motor = self.motor_hat.getMotor(index + 1)
motor.run(command + 1)
motor.setSpeed(speed)
print("Set %d motor to %d command with %d speed" % (index + 1, command + 1, speed))
motor_state = {"location": index, "command": command, "speed": speed}
n = len(self.motors)
if index < n:
self.motors[index] = motor_state
elif index == n:
self.motors.append(motor_state)
else:
raise IndexError()
def dict(self):
return {"motors": self.motors}<commit_msg>Update motor API with new data model<commit_after>
|
from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
def __init__(self, motor_hat=Adafruit_MotorHAT()):
self.motor_hat = motor_hat
self.motors = []
def release(self):
self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
def update_motor(self, index, command, speed):
motor = self.motor_hat.getMotor(index + 1)
motor.run(command + 1)
motor.setSpeed(speed)
motor_state = {"location": index, "command": command, "speed": speed}
n = len(self.motors)
if index < n:
self.motors[index] = motor_state
elif index == n:
self.motors.append(motor_state)
else:
raise IndexError()
def dict(self):
return {"motors": self.motors}
|
from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
def __init__(self, motor_hat=Adafruit_MotorHAT()):
self.motor_hat = motor_hat
self.motors = []
def release(self):
self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
def update_motor(self, index, command, speed):
motor = self.motor_hat.getMotor(index + 1)
motor.run(command + 1)
motor.setSpeed(speed)
print("Set %d motor to %d command with %d speed" % (index + 1, command + 1, speed))
motor_state = {"location": index, "command": command, "speed": speed}
n = len(self.motors)
if index < n:
self.motors[index] = motor_state
elif index == n:
self.motors.append(motor_state)
else:
raise IndexError()
def dict(self):
return {"motors": self.motors}Update motor API with new data modelfrom Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
def __init__(self, motor_hat=Adafruit_MotorHAT()):
self.motor_hat = motor_hat
self.motors = []
def release(self):
self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
def update_motor(self, index, command, speed):
motor = self.motor_hat.getMotor(index + 1)
motor.run(command + 1)
motor.setSpeed(speed)
motor_state = {"location": index, "command": command, "speed": speed}
n = len(self.motors)
if index < n:
self.motors[index] = motor_state
elif index == n:
self.motors.append(motor_state)
else:
raise IndexError()
def dict(self):
return {"motors": self.motors}
|
<commit_before>from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
def __init__(self, motor_hat=Adafruit_MotorHAT()):
self.motor_hat = motor_hat
self.motors = []
def release(self):
self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
def update_motor(self, index, command, speed):
motor = self.motor_hat.getMotor(index + 1)
motor.run(command + 1)
motor.setSpeed(speed)
print("Set %d motor to %d command with %d speed" % (index + 1, command + 1, speed))
motor_state = {"location": index, "command": command, "speed": speed}
n = len(self.motors)
if index < n:
self.motors[index] = motor_state
elif index == n:
self.motors.append(motor_state)
else:
raise IndexError()
def dict(self):
return {"motors": self.motors}<commit_msg>Update motor API with new data model<commit_after>from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
def __init__(self, motor_hat=Adafruit_MotorHAT()):
self.motor_hat = motor_hat
self.motors = []
def release(self):
self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)
def update_motor(self, index, command, speed):
motor = self.motor_hat.getMotor(index + 1)
motor.run(command + 1)
motor.setSpeed(speed)
motor_state = {"location": index, "command": command, "speed": speed}
n = len(self.motors)
if index < n:
self.motors[index] = motor_state
elif index == n:
self.motors.append(motor_state)
else:
raise IndexError()
def dict(self):
return {"motors": self.motors}
|
322f55a0c06c81c91d779c002993c1fff3ae0f1b
|
dthm4kaiako/config/__init__.py
|
dthm4kaiako/config/__init__.py
|
"""Configuration for Django system."""
__version__ = "0.12.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.13.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
Increment version number to 0.13.0
|
Increment version number to 0.13.0
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
"""Configuration for Django system."""
__version__ = "0.12.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.13.0
|
"""Configuration for Django system."""
__version__ = "0.13.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.12.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.13.0<commit_after>
|
"""Configuration for Django system."""
__version__ = "0.13.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.12.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.13.0"""Configuration for Django system."""
__version__ = "0.13.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.12.1"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.13.0<commit_after>"""Configuration for Django system."""
__version__ = "0.13.0"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
1fb46372db32fdb2606c560a1bbec168628039a7
|
lucid/modelzoo/other_models/CLIPx4.py
|
lucid/modelzoo/other_models/CLIPx4.py
|
# Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from lucid.modelzoo.vision_base import Model
class CLIPImage(Model):
image_value_range = (0, 255)
input_name = 'input_image'
model_name = "RN50_4x"
image_shape = [288, 288, 3]
model_path = "https://openaipublic.blob.core.windows.net/clip/tf/RN50_4x/084ee9c176da32014b0ebe42cd7ca66e/image32.pb"
|
# Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from lucid.modelzoo.vision_base import Model
class CLIPImage(Model):
image_value_range = (0, 255)
input_name = 'input_image'
model_name = "RN50_4x"
image_shape = [288, 288, 3]
model_path = "gs://modelzoo/vision/other_models/Clip_ResNet50.pb"
|
Update CLIP model URL to modelzoo bucket
|
Update CLIP model URL to modelzoo bucket
|
Python
|
apache-2.0
|
tensorflow/lucid,tensorflow/lucid,tensorflow/lucid,tensorflow/lucid
|
# Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from lucid.modelzoo.vision_base import Model
class CLIPImage(Model):
image_value_range = (0, 255)
input_name = 'input_image'
model_name = "RN50_4x"
image_shape = [288, 288, 3]
model_path = "https://openaipublic.blob.core.windows.net/clip/tf/RN50_4x/084ee9c176da32014b0ebe42cd7ca66e/image32.pb"
Update CLIP model URL to modelzoo bucket
|
# Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from lucid.modelzoo.vision_base import Model
class CLIPImage(Model):
image_value_range = (0, 255)
input_name = 'input_image'
model_name = "RN50_4x"
image_shape = [288, 288, 3]
model_path = "gs://modelzoo/vision/other_models/Clip_ResNet50.pb"
|
<commit_before># Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from lucid.modelzoo.vision_base import Model
class CLIPImage(Model):
image_value_range = (0, 255)
input_name = 'input_image'
model_name = "RN50_4x"
image_shape = [288, 288, 3]
model_path = "https://openaipublic.blob.core.windows.net/clip/tf/RN50_4x/084ee9c176da32014b0ebe42cd7ca66e/image32.pb"
<commit_msg>Update CLIP model URL to modelzoo bucket<commit_after>
|
# Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from lucid.modelzoo.vision_base import Model
class CLIPImage(Model):
image_value_range = (0, 255)
input_name = 'input_image'
model_name = "RN50_4x"
image_shape = [288, 288, 3]
model_path = "gs://modelzoo/vision/other_models/Clip_ResNet50.pb"
|
# Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from lucid.modelzoo.vision_base import Model
class CLIPImage(Model):
image_value_range = (0, 255)
input_name = 'input_image'
model_name = "RN50_4x"
image_shape = [288, 288, 3]
model_path = "https://openaipublic.blob.core.windows.net/clip/tf/RN50_4x/084ee9c176da32014b0ebe42cd7ca66e/image32.pb"
Update CLIP model URL to modelzoo bucket# Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from lucid.modelzoo.vision_base import Model
class CLIPImage(Model):
image_value_range = (0, 255)
input_name = 'input_image'
model_name = "RN50_4x"
image_shape = [288, 288, 3]
model_path = "gs://modelzoo/vision/other_models/Clip_ResNet50.pb"
|
<commit_before># Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from lucid.modelzoo.vision_base import Model
class CLIPImage(Model):
image_value_range = (0, 255)
input_name = 'input_image'
model_name = "RN50_4x"
image_shape = [288, 288, 3]
model_path = "https://openaipublic.blob.core.windows.net/clip/tf/RN50_4x/084ee9c176da32014b0ebe42cd7ca66e/image32.pb"
<commit_msg>Update CLIP model URL to modelzoo bucket<commit_after># Copyright 2018 The Lucid Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from lucid.modelzoo.vision_base import Model
class CLIPImage(Model):
image_value_range = (0, 255)
input_name = 'input_image'
model_name = "RN50_4x"
image_shape = [288, 288, 3]
model_path = "gs://modelzoo/vision/other_models/Clip_ResNet50.pb"
|
ea653bca0ac4c1f0c8aba581386909fe180dc2ee
|
entrypoint.py
|
entrypoint.py
|
#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
print('{} {} {}', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
if (os.environ.get('K_SERVICE')) != '':
print('Running HTTP service for Google Cloud')
# app = Flask(__name__)
# @app.route('/')
# def ContainerService():
# return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
# app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
else:
print('Running locally')
# stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
# output = stream.read()
# output
|
#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
if not os.environ.get('K_SERVICE'):
print('Running locally')
stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
output = stream.read()
output
else:
print('Running HTTP service {} {} {} for Google Cloud', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
app = Flask(__name__)
@app.route('/')
def ContainerService():
return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
|
Debug Google Cloud Run support
|
Debug Google Cloud Run support
|
Python
|
mit
|
diodesign/diosix
|
#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
print('{} {} {}', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
if (os.environ.get('K_SERVICE')) != '':
print('Running HTTP service for Google Cloud')
# app = Flask(__name__)
# @app.route('/')
# def ContainerService():
# return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
# app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
else:
print('Running locally')
# stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
# output = stream.read()
# output
Debug Google Cloud Run support
|
#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
if not os.environ.get('K_SERVICE'):
print('Running locally')
stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
output = stream.read()
output
else:
print('Running HTTP service {} {} {} for Google Cloud', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
app = Flask(__name__)
@app.route('/')
def ContainerService():
return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
|
<commit_before>#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
print('{} {} {}', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
if (os.environ.get('K_SERVICE')) != '':
print('Running HTTP service for Google Cloud')
# app = Flask(__name__)
# @app.route('/')
# def ContainerService():
# return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
# app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
else:
print('Running locally')
# stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
# output = stream.read()
# output
<commit_msg>Debug Google Cloud Run support<commit_after>
|
#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
if not os.environ.get('K_SERVICE'):
print('Running locally')
stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
output = stream.read()
output
else:
print('Running HTTP service {} {} {} for Google Cloud', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
app = Flask(__name__)
@app.route('/')
def ContainerService():
return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
|
#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
print('{} {} {}', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
if (os.environ.get('K_SERVICE')) != '':
print('Running HTTP service for Google Cloud')
# app = Flask(__name__)
# @app.route('/')
# def ContainerService():
# return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
# app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
else:
print('Running locally')
# stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
# output = stream.read()
# output
Debug Google Cloud Run support#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
if not os.environ.get('K_SERVICE'):
print('Running locally')
stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
output = stream.read()
output
else:
print('Running HTTP service {} {} {} for Google Cloud', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
app = Flask(__name__)
@app.route('/')
def ContainerService():
return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
|
<commit_before>#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
print('{} {} {}', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
if (os.environ.get('K_SERVICE')) != '':
print('Running HTTP service for Google Cloud')
# app = Flask(__name__)
# @app.route('/')
# def ContainerService():
# return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
# app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
else:
print('Running locally')
# stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
# output = stream.read()
# output
<commit_msg>Debug Google Cloud Run support<commit_after>#!/usr/bin/python3
#
# Define containerized environment for running Diosix on Qemu
#
# On Google Cloud Run: Creates HTTP server on port 8080
# or whatever was specified using the PORT system variable.
# Outputs via the HTTP port. This requires K_SERVICE to be set.
#
# On all other environments: Log to stdout
#
# syntax: entrypoint.py <command>
#
# Author: Chris Williams <diodesign@tuta.io>
#
import os
import sys
global command_result
from flask import Flask
if __name__ == "__main__":
if not os.environ.get('K_SERVICE'):
print('Running locally')
stream = os.popen('. $HOME/.cargo/env && cd /build/diosix && {}'.format(' '.join(sys.argv[1:])))
output = stream.read()
output
else:
print('Running HTTP service {} {} {} for Google Cloud', os.environ.get('K_SERVICE'), os.environ.get('K_REVISION'), os.environ.get('K_CONFIGURATION'))
app = Flask(__name__)
@app.route('/')
def ContainerService():
return 'Container built. Use docker images and docker run in the Google Cloud shell to run this container.\n'
app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
|
b3838e3ad3c8c9575c18ae9770504405fd3f5006
|
saltcloud/mapper.py
|
saltcloud/mapper.py
|
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
self.cloud.create(vm_)
|
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
import multiprocessing
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
if self.opts['parallel']:
multiprocessing.Process(
target=self.cloud.create(vm_)
).start()
else:
self.cloud.create(vm_)
|
Add parallel capability for running the map
|
Add parallel capability for running the map
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
self.cloud.create(vm_)
Add parallel capability for running the map
|
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
import multiprocessing
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
if self.opts['parallel']:
multiprocessing.Process(
target=self.cloud.create(vm_)
).start()
else:
self.cloud.create(vm_)
|
<commit_before>'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
self.cloud.create(vm_)
<commit_msg>Add parallel capability for running the map<commit_after>
|
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
import multiprocessing
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
if self.opts['parallel']:
multiprocessing.Process(
target=self.cloud.create(vm_)
).start()
else:
self.cloud.create(vm_)
|
'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
self.cloud.create(vm_)
Add parallel capability for running the map'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
import multiprocessing
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
if self.opts['parallel']:
multiprocessing.Process(
target=self.cloud.create(vm_)
).start()
else:
self.cloud.create(vm_)
|
<commit_before>'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
self.cloud.create(vm_)
<commit_msg>Add parallel capability for running the map<commit_after>'''
Read in a vm map file. The map file contains a mapping of profiles to names
allowing for individual vms to be created in a more stateful way
'''
# Import python libs
import os
import copy
import multiprocessing
# Import salt libs
import saltcloud.cloud
import salt.client
# Import third party libs
import yaml
class Map(object):
'''
Create a vm stateful map execution object
'''
def __init__(self, opts):
self.opts = opts
self.cloud = saltcloud.cloud.Cloud(self.opts)
self.map = self.read()
def read(self):
'''
Read in the specified map file and return the map structure
'''
if not self.opts['map']:
return {}
if not os.path.isfile(self.opts['map']):
return {}
try:
with open(self.opts['map'], 'rb') as fp_:
map_ = yaml.loads(fb_.read())
except Exception:
return {}
if 'include' in map_:
map_ = salt.config.include_config(map_, self.opts['map'])
return map_
def run_map(self):
'''
Execute the contents of the vm map
'''
for profile in self.map:
for name in self.map[profile]:
if not profile in self.opts['vm']:
continue
vm_ = copy.deepcopy(self.opts['vm'][profile])
vm_['name'] = name
if self.opts['parallel']:
multiprocessing.Process(
target=self.cloud.create(vm_)
).start()
else:
self.cloud.create(vm_)
|
7211ecb704a52f7dfe0984b1bb70305367f5104c
|
tools/gyp_dart.py
|
tools/gyp_dart.py
|
#!/usr/bin/env python
# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invoke gyp to generate build files for building the Dart VM.
"""
import os
import subprocess
import sys
def execute(args):
process = subprocess.Popen(args)
process.wait()
return process.returncode
def main():
component = 'all'
if len(sys.argv) == 2:
component = sys.argv[1]
component_gyp_files = {
'all' : 'dart/dart.gyp',
'runtime' : 'dart/runtime/dart-runtime.gyp',
}
args = ['python', '-S', 'dart/third_party/gyp/gyp_main.py',
'--depth=dart', '-Idart/tools/gyp/all.gypi',
component_gyp_files[component]]
if sys.platform == 'win32':
# Generate Visual Studio 2010 compatible files by default.
if not os.environ.get('GYP_MSVS_VERSION'):
args.extend(['-G', 'msvs_version=2010'])
sys.exit(execute(args))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invoke gyp to generate build files for building the Dart VM.
"""
import os
import subprocess
import sys
SCRIPT_DIR = os.path.dirname(sys.argv[0])
DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..'))
def execute(args):
process = subprocess.Popen(args, cwd=DART_ROOT)
process.wait()
return process.returncode
def main():
component = 'all'
if len(sys.argv) == 2:
component = sys.argv[1]
component_gyp_files = {
'all' : 'dart.gyp',
'runtime' : 'runtime/dart-runtime.gyp',
}
args = ['python', '-S', 'third_party/gyp/gyp_main.py',
'--depth=.', '-Itools/gyp/all.gypi',
component_gyp_files[component]]
if sys.platform == 'win32':
# Generate Visual Studio 2010 compatible files by default.
if not os.environ.get('GYP_MSVS_VERSION'):
args.extend(['-G', 'msvs_version=2010'])
sys.exit(execute(args))
if __name__ == '__main__':
main()
|
Make tools/gypdart independent of the directory from which it is called.
|
Make tools/gypdart independent of the directory from which it is called.
This enables more independence in the way you can structure your checkout using gclient.
BUG=
R=ahe@google.com
Review URL: https://codereview.chromium.org//1023893003
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@44640 260f80e4-7a28-3924-810f-c04153c831b5
|
Python
|
bsd-3-clause
|
dartino/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk,dart-lang/sdk,dart-lang/sdk,dart-archive/dart-sdk,dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk,dartino/dart-sdk,dart-archive/dart-sdk
|
#!/usr/bin/env python
# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invoke gyp to generate build files for building the Dart VM.
"""
import os
import subprocess
import sys
def execute(args):
process = subprocess.Popen(args)
process.wait()
return process.returncode
def main():
component = 'all'
if len(sys.argv) == 2:
component = sys.argv[1]
component_gyp_files = {
'all' : 'dart/dart.gyp',
'runtime' : 'dart/runtime/dart-runtime.gyp',
}
args = ['python', '-S', 'dart/third_party/gyp/gyp_main.py',
'--depth=dart', '-Idart/tools/gyp/all.gypi',
component_gyp_files[component]]
if sys.platform == 'win32':
# Generate Visual Studio 2010 compatible files by default.
if not os.environ.get('GYP_MSVS_VERSION'):
args.extend(['-G', 'msvs_version=2010'])
sys.exit(execute(args))
if __name__ == '__main__':
main()
Make tools/gypdart independent of the directory from which it is called.
This enables more independence in the way you can structure your checkout using gclient.
BUG=
R=ahe@google.com
Review URL: https://codereview.chromium.org//1023893003
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@44640 260f80e4-7a28-3924-810f-c04153c831b5
|
#!/usr/bin/env python
# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invoke gyp to generate build files for building the Dart VM.
"""
import os
import subprocess
import sys
SCRIPT_DIR = os.path.dirname(sys.argv[0])
DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..'))
def execute(args):
process = subprocess.Popen(args, cwd=DART_ROOT)
process.wait()
return process.returncode
def main():
component = 'all'
if len(sys.argv) == 2:
component = sys.argv[1]
component_gyp_files = {
'all' : 'dart.gyp',
'runtime' : 'runtime/dart-runtime.gyp',
}
args = ['python', '-S', 'third_party/gyp/gyp_main.py',
'--depth=.', '-Itools/gyp/all.gypi',
component_gyp_files[component]]
if sys.platform == 'win32':
# Generate Visual Studio 2010 compatible files by default.
if not os.environ.get('GYP_MSVS_VERSION'):
args.extend(['-G', 'msvs_version=2010'])
sys.exit(execute(args))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invoke gyp to generate build files for building the Dart VM.
"""
import os
import subprocess
import sys
def execute(args):
process = subprocess.Popen(args)
process.wait()
return process.returncode
def main():
component = 'all'
if len(sys.argv) == 2:
component = sys.argv[1]
component_gyp_files = {
'all' : 'dart/dart.gyp',
'runtime' : 'dart/runtime/dart-runtime.gyp',
}
args = ['python', '-S', 'dart/third_party/gyp/gyp_main.py',
'--depth=dart', '-Idart/tools/gyp/all.gypi',
component_gyp_files[component]]
if sys.platform == 'win32':
# Generate Visual Studio 2010 compatible files by default.
if not os.environ.get('GYP_MSVS_VERSION'):
args.extend(['-G', 'msvs_version=2010'])
sys.exit(execute(args))
if __name__ == '__main__':
main()
<commit_msg>Make tools/gypdart independent of the directory from which it is called.
This enables more independence in the way you can structure your checkout using gclient.
BUG=
R=ahe@google.com
Review URL: https://codereview.chromium.org//1023893003
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@44640 260f80e4-7a28-3924-810f-c04153c831b5<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invoke gyp to generate build files for building the Dart VM.
"""
import os
import subprocess
import sys
SCRIPT_DIR = os.path.dirname(sys.argv[0])
DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..'))
def execute(args):
process = subprocess.Popen(args, cwd=DART_ROOT)
process.wait()
return process.returncode
def main():
component = 'all'
if len(sys.argv) == 2:
component = sys.argv[1]
component_gyp_files = {
'all' : 'dart.gyp',
'runtime' : 'runtime/dart-runtime.gyp',
}
args = ['python', '-S', 'third_party/gyp/gyp_main.py',
'--depth=.', '-Itools/gyp/all.gypi',
component_gyp_files[component]]
if sys.platform == 'win32':
# Generate Visual Studio 2010 compatible files by default.
if not os.environ.get('GYP_MSVS_VERSION'):
args.extend(['-G', 'msvs_version=2010'])
sys.exit(execute(args))
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invoke gyp to generate build files for building the Dart VM.
"""
import os
import subprocess
import sys
def execute(args):
process = subprocess.Popen(args)
process.wait()
return process.returncode
def main():
component = 'all'
if len(sys.argv) == 2:
component = sys.argv[1]
component_gyp_files = {
'all' : 'dart/dart.gyp',
'runtime' : 'dart/runtime/dart-runtime.gyp',
}
args = ['python', '-S', 'dart/third_party/gyp/gyp_main.py',
'--depth=dart', '-Idart/tools/gyp/all.gypi',
component_gyp_files[component]]
if sys.platform == 'win32':
# Generate Visual Studio 2010 compatible files by default.
if not os.environ.get('GYP_MSVS_VERSION'):
args.extend(['-G', 'msvs_version=2010'])
sys.exit(execute(args))
if __name__ == '__main__':
main()
Make tools/gypdart independent of the directory from which it is called.
This enables more independence in the way you can structure your checkout using gclient.
BUG=
R=ahe@google.com
Review URL: https://codereview.chromium.org//1023893003
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@44640 260f80e4-7a28-3924-810f-c04153c831b5#!/usr/bin/env python
# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invoke gyp to generate build files for building the Dart VM.
"""
import os
import subprocess
import sys
SCRIPT_DIR = os.path.dirname(sys.argv[0])
DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..'))
def execute(args):
process = subprocess.Popen(args, cwd=DART_ROOT)
process.wait()
return process.returncode
def main():
component = 'all'
if len(sys.argv) == 2:
component = sys.argv[1]
component_gyp_files = {
'all' : 'dart.gyp',
'runtime' : 'runtime/dart-runtime.gyp',
}
args = ['python', '-S', 'third_party/gyp/gyp_main.py',
'--depth=.', '-Itools/gyp/all.gypi',
component_gyp_files[component]]
if sys.platform == 'win32':
# Generate Visual Studio 2010 compatible files by default.
if not os.environ.get('GYP_MSVS_VERSION'):
args.extend(['-G', 'msvs_version=2010'])
sys.exit(execute(args))
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invoke gyp to generate build files for building the Dart VM.
"""
import os
import subprocess
import sys
def execute(args):
process = subprocess.Popen(args)
process.wait()
return process.returncode
def main():
component = 'all'
if len(sys.argv) == 2:
component = sys.argv[1]
component_gyp_files = {
'all' : 'dart/dart.gyp',
'runtime' : 'dart/runtime/dart-runtime.gyp',
}
args = ['python', '-S', 'dart/third_party/gyp/gyp_main.py',
'--depth=dart', '-Idart/tools/gyp/all.gypi',
component_gyp_files[component]]
if sys.platform == 'win32':
# Generate Visual Studio 2010 compatible files by default.
if not os.environ.get('GYP_MSVS_VERSION'):
args.extend(['-G', 'msvs_version=2010'])
sys.exit(execute(args))
if __name__ == '__main__':
main()
<commit_msg>Make tools/gypdart independent of the directory from which it is called.
This enables more independence in the way you can structure your checkout using gclient.
BUG=
R=ahe@google.com
Review URL: https://codereview.chromium.org//1023893003
git-svn-id: c93d8a2297af3b929165606efe145742a534bc71@44640 260f80e4-7a28-3924-810f-c04153c831b5<commit_after>#!/usr/bin/env python
# Copyright (c) 2012 The Dart Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invoke gyp to generate build files for building the Dart VM.
"""
import os
import subprocess
import sys
SCRIPT_DIR = os.path.dirname(sys.argv[0])
DART_ROOT = os.path.realpath(os.path.join(SCRIPT_DIR, '..'))
def execute(args):
process = subprocess.Popen(args, cwd=DART_ROOT)
process.wait()
return process.returncode
def main():
component = 'all'
if len(sys.argv) == 2:
component = sys.argv[1]
component_gyp_files = {
'all' : 'dart.gyp',
'runtime' : 'runtime/dart-runtime.gyp',
}
args = ['python', '-S', 'third_party/gyp/gyp_main.py',
'--depth=.', '-Itools/gyp/all.gypi',
component_gyp_files[component]]
if sys.platform == 'win32':
# Generate Visual Studio 2010 compatible files by default.
if not os.environ.get('GYP_MSVS_VERSION'):
args.extend(['-G', 'msvs_version=2010'])
sys.exit(execute(args))
if __name__ == '__main__':
main()
|
34d672ff25dc1b249744802bf3b05e4a0303e7c9
|
etest_test/common_test.py
|
etest_test/common_test.py
|
"""Common testing bits."""
# Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# etest is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
import re
import unittest
from typing import Set
from etest_test import helpers_test
logger = logging.getLogger(__name__)
class BaseEtestTest(unittest.TestCase):
"""Base Etest Test."""
mocks_mask: Set = set()
mocks: Set = set()
@property
def real_module(self):
"""Name of the real module."""
return re.sub(r"\.[^.]+", "", self.__module__.replace("test_", ""), 1)
def _patch(self, name):
logger.debug("mocking %s", self.real_module + "." + name)
_ = unittest.mock.patch(self.real_module + "." + name)
setattr(self, "mocked_" + name.replace(".", "_").strip("_"), _.start())
self.addCleanup(_.stop)
mocks.add("ebuild")
@helpers_test.mock("ebuild")
def mock_ebuild(self):
"""Mock ebuild."""
self._patch("ebuild")
|
"""Common testing bits."""
# Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# etest is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
import re
import unittest
from typing import Set
from etest_test import helpers_test
logger = logging.getLogger(__name__)
class BaseEtestTest(unittest.TestCase):
"""Base Etest Test."""
mocks_mask: Set = set()
mocks: Set = set()
@property
def real_module(self):
"""Name of the real module."""
return re.sub(r"\.[^.]+", "", self.__module__.replace("_test", ""), 1)
def _patch(self, name):
logger.debug("mocking %s", self.real_module + "." + name)
_ = unittest.mock.patch(self.real_module + "." + name)
setattr(self, "mocked_" + name.replace(".", "_").strip("_"), _.start())
self.addCleanup(_.stop)
mocks.add("ebuild")
@helpers_test.mock("ebuild")
def mock_ebuild(self):
"""Mock ebuild."""
self._patch("ebuild")
|
Fix module names for pytest.
|
Fix module names for pytest.
We've shifted the directories but we still have the following horrid
code for determining the module name to mock. Eventually we should
switch to patch object (much more modern and use the module under test
directly).
|
Python
|
mit
|
alunduil/etest,alunduil/etest
|
"""Common testing bits."""
# Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# etest is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
import re
import unittest
from typing import Set
from etest_test import helpers_test
logger = logging.getLogger(__name__)
class BaseEtestTest(unittest.TestCase):
"""Base Etest Test."""
mocks_mask: Set = set()
mocks: Set = set()
@property
def real_module(self):
"""Name of the real module."""
return re.sub(r"\.[^.]+", "", self.__module__.replace("test_", ""), 1)
def _patch(self, name):
logger.debug("mocking %s", self.real_module + "." + name)
_ = unittest.mock.patch(self.real_module + "." + name)
setattr(self, "mocked_" + name.replace(".", "_").strip("_"), _.start())
self.addCleanup(_.stop)
mocks.add("ebuild")
@helpers_test.mock("ebuild")
def mock_ebuild(self):
"""Mock ebuild."""
self._patch("ebuild")
Fix module names for pytest.
We've shifted the directories but we still have the following horrid
code for determining the module name to mock. Eventually we should
switch to patch object (much more modern and use the module under test
directly).
|
"""Common testing bits."""
# Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# etest is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
import re
import unittest
from typing import Set
from etest_test import helpers_test
logger = logging.getLogger(__name__)
class BaseEtestTest(unittest.TestCase):
"""Base Etest Test."""
mocks_mask: Set = set()
mocks: Set = set()
@property
def real_module(self):
"""Name of the real module."""
return re.sub(r"\.[^.]+", "", self.__module__.replace("_test", ""), 1)
def _patch(self, name):
logger.debug("mocking %s", self.real_module + "." + name)
_ = unittest.mock.patch(self.real_module + "." + name)
setattr(self, "mocked_" + name.replace(".", "_").strip("_"), _.start())
self.addCleanup(_.stop)
mocks.add("ebuild")
@helpers_test.mock("ebuild")
def mock_ebuild(self):
"""Mock ebuild."""
self._patch("ebuild")
|
<commit_before>"""Common testing bits."""
# Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# etest is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
import re
import unittest
from typing import Set
from etest_test import helpers_test
logger = logging.getLogger(__name__)
class BaseEtestTest(unittest.TestCase):
"""Base Etest Test."""
mocks_mask: Set = set()
mocks: Set = set()
@property
def real_module(self):
"""Name of the real module."""
return re.sub(r"\.[^.]+", "", self.__module__.replace("test_", ""), 1)
def _patch(self, name):
logger.debug("mocking %s", self.real_module + "." + name)
_ = unittest.mock.patch(self.real_module + "." + name)
setattr(self, "mocked_" + name.replace(".", "_").strip("_"), _.start())
self.addCleanup(_.stop)
mocks.add("ebuild")
@helpers_test.mock("ebuild")
def mock_ebuild(self):
"""Mock ebuild."""
self._patch("ebuild")
<commit_msg>Fix module names for pytest.
We've shifted the directories but we still have the following horrid
code for determining the module name to mock. Eventually we should
switch to patch object (much more modern and use the module under test
directly).<commit_after>
|
"""Common testing bits."""
# Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# etest is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
import re
import unittest
from typing import Set
from etest_test import helpers_test
logger = logging.getLogger(__name__)
class BaseEtestTest(unittest.TestCase):
"""Base Etest Test."""
mocks_mask: Set = set()
mocks: Set = set()
@property
def real_module(self):
"""Name of the real module."""
return re.sub(r"\.[^.]+", "", self.__module__.replace("_test", ""), 1)
def _patch(self, name):
logger.debug("mocking %s", self.real_module + "." + name)
_ = unittest.mock.patch(self.real_module + "." + name)
setattr(self, "mocked_" + name.replace(".", "_").strip("_"), _.start())
self.addCleanup(_.stop)
mocks.add("ebuild")
@helpers_test.mock("ebuild")
def mock_ebuild(self):
"""Mock ebuild."""
self._patch("ebuild")
|
"""Common testing bits."""
# Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# etest is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
import re
import unittest
from typing import Set
from etest_test import helpers_test
logger = logging.getLogger(__name__)
class BaseEtestTest(unittest.TestCase):
"""Base Etest Test."""
mocks_mask: Set = set()
mocks: Set = set()
@property
def real_module(self):
"""Name of the real module."""
return re.sub(r"\.[^.]+", "", self.__module__.replace("test_", ""), 1)
def _patch(self, name):
logger.debug("mocking %s", self.real_module + "." + name)
_ = unittest.mock.patch(self.real_module + "." + name)
setattr(self, "mocked_" + name.replace(".", "_").strip("_"), _.start())
self.addCleanup(_.stop)
mocks.add("ebuild")
@helpers_test.mock("ebuild")
def mock_ebuild(self):
"""Mock ebuild."""
self._patch("ebuild")
Fix module names for pytest.
We've shifted the directories but we still have the following horrid
code for determining the module name to mock. Eventually we should
switch to patch object (much more modern and use the module under test
directly)."""Common testing bits."""
# Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# etest is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
import re
import unittest
from typing import Set
from etest_test import helpers_test
logger = logging.getLogger(__name__)
class BaseEtestTest(unittest.TestCase):
"""Base Etest Test."""
mocks_mask: Set = set()
mocks: Set = set()
@property
def real_module(self):
"""Name of the real module."""
return re.sub(r"\.[^.]+", "", self.__module__.replace("_test", ""), 1)
def _patch(self, name):
logger.debug("mocking %s", self.real_module + "." + name)
_ = unittest.mock.patch(self.real_module + "." + name)
setattr(self, "mocked_" + name.replace(".", "_").strip("_"), _.start())
self.addCleanup(_.stop)
mocks.add("ebuild")
@helpers_test.mock("ebuild")
def mock_ebuild(self):
"""Mock ebuild."""
self._patch("ebuild")
|
<commit_before>"""Common testing bits."""
# Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# etest is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
import re
import unittest
from typing import Set
from etest_test import helpers_test
logger = logging.getLogger(__name__)
class BaseEtestTest(unittest.TestCase):
"""Base Etest Test."""
mocks_mask: Set = set()
mocks: Set = set()
@property
def real_module(self):
"""Name of the real module."""
return re.sub(r"\.[^.]+", "", self.__module__.replace("test_", ""), 1)
def _patch(self, name):
logger.debug("mocking %s", self.real_module + "." + name)
_ = unittest.mock.patch(self.real_module + "." + name)
setattr(self, "mocked_" + name.replace(".", "_").strip("_"), _.start())
self.addCleanup(_.stop)
mocks.add("ebuild")
@helpers_test.mock("ebuild")
def mock_ebuild(self):
"""Mock ebuild."""
self._patch("ebuild")
<commit_msg>Fix module names for pytest.
We've shifted the directories but we still have the following horrid
code for determining the module name to mock. Eventually we should
switch to patch object (much more modern and use the module under test
directly).<commit_after>"""Common testing bits."""
# Copyright (C) 2014 by Alex Brandt <alunduil@alunduil.com>
#
# etest is freely distributable under the terms of an MIT-style license.
# See COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
import re
import unittest
from typing import Set
from etest_test import helpers_test
logger = logging.getLogger(__name__)
class BaseEtestTest(unittest.TestCase):
"""Base Etest Test."""
mocks_mask: Set = set()
mocks: Set = set()
@property
def real_module(self):
"""Name of the real module."""
return re.sub(r"\.[^.]+", "", self.__module__.replace("_test", ""), 1)
def _patch(self, name):
logger.debug("mocking %s", self.real_module + "." + name)
_ = unittest.mock.patch(self.real_module + "." + name)
setattr(self, "mocked_" + name.replace(".", "_").strip("_"), _.start())
self.addCleanup(_.stop)
mocks.add("ebuild")
@helpers_test.mock("ebuild")
def mock_ebuild(self):
"""Mock ebuild."""
self._patch("ebuild")
|
ab9cd172641176c2ae8fdb0ec20d48e45499436e
|
django_extensions/management/technical_response.py
|
django_extensions/management/technical_response.py
|
# -*- coding: utf-8 -*-
import six
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
six.reraise(exc_type, exc_value, tb)
|
# -*- coding: utf-8 -*-
import six
from django.core.handlers.wsgi import WSGIHandler
wsgi_tb = None
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""Function to override django.views.debug.technical_500_response.
Django's convert_exception_to_response wrapper is called on each 'Middleware' object to avoid
leaking exceptions. The wrapper eventually calls technical_500_response to create a response for
an error view.
Runserver_plus overrides the django debug view's technical_500_response function with this
to allow for an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response on each object in the stack of Middleware objects, re-raising an error
quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only raise the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
global wsgi_tb
# After an uncaught exception is raised the class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals['self'], WSGIHandler):
wsgi_tb = tb
six.reraise(exc_type, exc_value, tb)
else:
six.reraise(exc_type, exc_value, wsgi_tb)
|
Reduce reraise pollution in runserver_plus traceback page
|
Reduce reraise pollution in runserver_plus traceback page
|
Python
|
mit
|
django-extensions/django-extensions,django-extensions/django-extensions,django-extensions/django-extensions
|
# -*- coding: utf-8 -*-
import six
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
six.reraise(exc_type, exc_value, tb)
Reduce reraise pollution in runserver_plus traceback page
|
# -*- coding: utf-8 -*-
import six
from django.core.handlers.wsgi import WSGIHandler
wsgi_tb = None
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""Function to override django.views.debug.technical_500_response.
Django's convert_exception_to_response wrapper is called on each 'Middleware' object to avoid
leaking exceptions. The wrapper eventually calls technical_500_response to create a response for
an error view.
Runserver_plus overrides the django debug view's technical_500_response function with this
to allow for an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response on each object in the stack of Middleware objects, re-raising an error
quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only raise the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
global wsgi_tb
# After an uncaught exception is raised the class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals['self'], WSGIHandler):
wsgi_tb = tb
six.reraise(exc_type, exc_value, tb)
else:
six.reraise(exc_type, exc_value, wsgi_tb)
|
<commit_before># -*- coding: utf-8 -*-
import six
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
six.reraise(exc_type, exc_value, tb)
<commit_msg>Reduce reraise pollution in runserver_plus traceback page<commit_after>
|
# -*- coding: utf-8 -*-
import six
from django.core.handlers.wsgi import WSGIHandler
wsgi_tb = None
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""Function to override django.views.debug.technical_500_response.
Django's convert_exception_to_response wrapper is called on each 'Middleware' object to avoid
leaking exceptions. The wrapper eventually calls technical_500_response to create a response for
an error view.
Runserver_plus overrides the django debug view's technical_500_response function with this
to allow for an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response on each object in the stack of Middleware objects, re-raising an error
quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only raise the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
global wsgi_tb
# After an uncaught exception is raised the class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals['self'], WSGIHandler):
wsgi_tb = tb
six.reraise(exc_type, exc_value, tb)
else:
six.reraise(exc_type, exc_value, wsgi_tb)
|
# -*- coding: utf-8 -*-
import six
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
six.reraise(exc_type, exc_value, tb)
Reduce reraise pollution in runserver_plus traceback page# -*- coding: utf-8 -*-
import six
from django.core.handlers.wsgi import WSGIHandler
wsgi_tb = None
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""Function to override django.views.debug.technical_500_response.
Django's convert_exception_to_response wrapper is called on each 'Middleware' object to avoid
leaking exceptions. The wrapper eventually calls technical_500_response to create a response for
an error view.
Runserver_plus overrides the django debug view's technical_500_response function with this
to allow for an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response on each object in the stack of Middleware objects, re-raising an error
quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only raise the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
global wsgi_tb
# After an uncaught exception is raised the class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals['self'], WSGIHandler):
wsgi_tb = tb
six.reraise(exc_type, exc_value, tb)
else:
six.reraise(exc_type, exc_value, wsgi_tb)
|
<commit_before># -*- coding: utf-8 -*-
import six
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
six.reraise(exc_type, exc_value, tb)
<commit_msg>Reduce reraise pollution in runserver_plus traceback page<commit_after># -*- coding: utf-8 -*-
import six
from django.core.handlers.wsgi import WSGIHandler
wsgi_tb = None
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""Function to override django.views.debug.technical_500_response.
Django's convert_exception_to_response wrapper is called on each 'Middleware' object to avoid
leaking exceptions. The wrapper eventually calls technical_500_response to create a response for
an error view.
Runserver_plus overrides the django debug view's technical_500_response function with this
to allow for an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response on each object in the stack of Middleware objects, re-raising an error
quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only raise the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
global wsgi_tb
# After an uncaught exception is raised the class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals['self'], WSGIHandler):
wsgi_tb = tb
six.reraise(exc_type, exc_value, tb)
else:
six.reraise(exc_type, exc_value, wsgi_tb)
|
68b2135eebe6e1475748ea34aeeb360ef7c79748
|
ibis/__init__.py
|
ibis/__init__.py
|
# --------------------------------------------------------------------------------------------------
# Ibis: a lightweight template engine.
#
# How it works: A lexer transforms a template string into a sequence of tokens. A parser takes this
# sequence and compiles it into a tree of nodes. Each node has a .render() method which takes a
# context object and returns a string. The entire compiled node tree can be rendered by calling
# .render() on the root node.
#
# Compiling and rendering the node tree are two distinct processes. The template only needs to be
# compiled once, it can then be cached and rendered multiple times with different context objects.
#
# The Template class acts as the public interface to the template engine. This is the only class
# the end-user needs to interact with directly. A Template object is initialized with a template
# string. It compiles the string and stores the resulting node tree for future rendering. Calling
# the template object's .render() method with a dictionary of key-value pairs or a set of keyword
# arguments renders the template and returns the result as a string.
#
# Example:
#
# >>> template = Template('{{foo}} and {{bar}}')
#
# >>> template.render(foo='ham', bar='eggs')
# 'ham and eggs'
#
# >>> template.render({'foo': 1, 'bar': 2})
# '1 and 2'
#
# --------------------------------------------------------------------------------------------------
from . import filters
from . import nodes
from . import loaders
from . import errors
from . import compiler
from .template import Template
# Library version.
__version__ = "2.0.0-alpha.1"
# Assign a template-loading callable here to enable the {% include %} and {% extends %} tags.
# The callable should accept one or more string arguments and either return an instance of the
# Template class or raise a TemplateLoadError exception.
loader = None
|
from . import filters
from . import nodes
from . import loaders
from . import errors
from . import compiler
from .template import Template
# Library version.
__version__ = "2.0.0-alpha.1"
# Assign a template-loading callable here to enable the {% include %} and {% extends %} tags.
# The callable should accept one or more string arguments and either return an instance of the
# Template class or raise a TemplateLoadError exception.
loader = None
|
Remove header comment, duplicated in docs
|
Remove header comment, duplicated in docs
|
Python
|
unlicense
|
dmulholland/ibis
|
# --------------------------------------------------------------------------------------------------
# Ibis: a lightweight template engine.
#
# How it works: A lexer transforms a template string into a sequence of tokens. A parser takes this
# sequence and compiles it into a tree of nodes. Each node has a .render() method which takes a
# context object and returns a string. The entire compiled node tree can be rendered by calling
# .render() on the root node.
#
# Compiling and rendering the node tree are two distinct processes. The template only needs to be
# compiled once, it can then be cached and rendered multiple times with different context objects.
#
# The Template class acts as the public interface to the template engine. This is the only class
# the end-user needs to interact with directly. A Template object is initialized with a template
# string. It compiles the string and stores the resulting node tree for future rendering. Calling
# the template object's .render() method with a dictionary of key-value pairs or a set of keyword
# arguments renders the template and returns the result as a string.
#
# Example:
#
# >>> template = Template('{{foo}} and {{bar}}')
#
# >>> template.render(foo='ham', bar='eggs')
# 'ham and eggs'
#
# >>> template.render({'foo': 1, 'bar': 2})
# '1 and 2'
#
# --------------------------------------------------------------------------------------------------
from . import filters
from . import nodes
from . import loaders
from . import errors
from . import compiler
from .template import Template
# Library version.
__version__ = "2.0.0-alpha.1"
# Assign a template-loading callable here to enable the {% include %} and {% extends %} tags.
# The callable should accept one or more string arguments and either return an instance of the
# Template class or raise a TemplateLoadError exception.
loader = None
Remove header comment, duplicated in docs
|
from . import filters
from . import nodes
from . import loaders
from . import errors
from . import compiler
from .template import Template
# Library version.
__version__ = "2.0.0-alpha.1"
# Assign a template-loading callable here to enable the {% include %} and {% extends %} tags.
# The callable should accept one or more string arguments and either return an instance of the
# Template class or raise a TemplateLoadError exception.
loader = None
|
<commit_before># --------------------------------------------------------------------------------------------------
# Ibis: a lightweight template engine.
#
# How it works: A lexer transforms a template string into a sequence of tokens. A parser takes this
# sequence and compiles it into a tree of nodes. Each node has a .render() method which takes a
# context object and returns a string. The entire compiled node tree can be rendered by calling
# .render() on the root node.
#
# Compiling and rendering the node tree are two distinct processes. The template only needs to be
# compiled once, it can then be cached and rendered multiple times with different context objects.
#
# The Template class acts as the public interface to the template engine. This is the only class
# the end-user needs to interact with directly. A Template object is initialized with a template
# string. It compiles the string and stores the resulting node tree for future rendering. Calling
# the template object's .render() method with a dictionary of key-value pairs or a set of keyword
# arguments renders the template and returns the result as a string.
#
# Example:
#
# >>> template = Template('{{foo}} and {{bar}}')
#
# >>> template.render(foo='ham', bar='eggs')
# 'ham and eggs'
#
# >>> template.render({'foo': 1, 'bar': 2})
# '1 and 2'
#
# --------------------------------------------------------------------------------------------------
from . import filters
from . import nodes
from . import loaders
from . import errors
from . import compiler
from .template import Template
# Library version.
__version__ = "2.0.0-alpha.1"
# Assign a template-loading callable here to enable the {% include %} and {% extends %} tags.
# The callable should accept one or more string arguments and either return an instance of the
# Template class or raise a TemplateLoadError exception.
loader = None
<commit_msg>Remove header comment, duplicated in docs<commit_after>
|
from . import filters
from . import nodes
from . import loaders
from . import errors
from . import compiler
from .template import Template
# Library version.
__version__ = "2.0.0-alpha.1"
# Assign a template-loading callable here to enable the {% include %} and {% extends %} tags.
# The callable should accept one or more string arguments and either return an instance of the
# Template class or raise a TemplateLoadError exception.
loader = None
|
# --------------------------------------------------------------------------------------------------
# Ibis: a lightweight template engine.
#
# How it works: A lexer transforms a template string into a sequence of tokens. A parser takes this
# sequence and compiles it into a tree of nodes. Each node has a .render() method which takes a
# context object and returns a string. The entire compiled node tree can be rendered by calling
# .render() on the root node.
#
# Compiling and rendering the node tree are two distinct processes. The template only needs to be
# compiled once, it can then be cached and rendered multiple times with different context objects.
#
# The Template class acts as the public interface to the template engine. This is the only class
# the end-user needs to interact with directly. A Template object is initialized with a template
# string. It compiles the string and stores the resulting node tree for future rendering. Calling
# the template object's .render() method with a dictionary of key-value pairs or a set of keyword
# arguments renders the template and returns the result as a string.
#
# Example:
#
# >>> template = Template('{{foo}} and {{bar}}')
#
# >>> template.render(foo='ham', bar='eggs')
# 'ham and eggs'
#
# >>> template.render({'foo': 1, 'bar': 2})
# '1 and 2'
#
# --------------------------------------------------------------------------------------------------
from . import filters
from . import nodes
from . import loaders
from . import errors
from . import compiler
from .template import Template
# Library version.
__version__ = "2.0.0-alpha.1"
# Assign a template-loading callable here to enable the {% include %} and {% extends %} tags.
# The callable should accept one or more string arguments and either return an instance of the
# Template class or raise a TemplateLoadError exception.
loader = None
Remove header comment, duplicated in docsfrom . import filters
from . import nodes
from . import loaders
from . import errors
from . import compiler
from .template import Template
# Library version.
__version__ = "2.0.0-alpha.1"
# Assign a template-loading callable here to enable the {% include %} and {% extends %} tags.
# The callable should accept one or more string arguments and either return an instance of the
# Template class or raise a TemplateLoadError exception.
loader = None
|
<commit_before># --------------------------------------------------------------------------------------------------
# Ibis: a lightweight template engine.
#
# How it works: A lexer transforms a template string into a sequence of tokens. A parser takes this
# sequence and compiles it into a tree of nodes. Each node has a .render() method which takes a
# context object and returns a string. The entire compiled node tree can be rendered by calling
# .render() on the root node.
#
# Compiling and rendering the node tree are two distinct processes. The template only needs to be
# compiled once, it can then be cached and rendered multiple times with different context objects.
#
# The Template class acts as the public interface to the template engine. This is the only class
# the end-user needs to interact with directly. A Template object is initialized with a template
# string. It compiles the string and stores the resulting node tree for future rendering. Calling
# the template object's .render() method with a dictionary of key-value pairs or a set of keyword
# arguments renders the template and returns the result as a string.
#
# Example:
#
# >>> template = Template('{{foo}} and {{bar}}')
#
# >>> template.render(foo='ham', bar='eggs')
# 'ham and eggs'
#
# >>> template.render({'foo': 1, 'bar': 2})
# '1 and 2'
#
# --------------------------------------------------------------------------------------------------
from . import filters
from . import nodes
from . import loaders
from . import errors
from . import compiler
from .template import Template
# Library version.
__version__ = "2.0.0-alpha.1"
# Assign a template-loading callable here to enable the {% include %} and {% extends %} tags.
# The callable should accept one or more string arguments and either return an instance of the
# Template class or raise a TemplateLoadError exception.
loader = None
<commit_msg>Remove header comment, duplicated in docs<commit_after>from . import filters
from . import nodes
from . import loaders
from . import errors
from . import compiler
from .template import Template
# Library version.
__version__ = "2.0.0-alpha.1"
# Assign a template-loading callable here to enable the {% include %} and {% extends %} tags.
# The callable should accept one or more string arguments and either return an instance of the
# Template class or raise a TemplateLoadError exception.
loader = None
|
a0a297b02d0f97815a2ba2be8c4b6ec8e139b608
|
examples/app/app.py
|
examples/app/app.py
|
from sling import Application
from sling.core.logger import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(
ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
|
from sling import Application
from sling.core.logger import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
# Install werkzeug debugger
from werkzeug.debug import DebuggedApplication
app.add_wsgi_middleware(DebuggedApplication, evalex=True)
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
|
Add example for werkzeug middleware installation.
|
Add example for werkzeug middleware installation.
|
Python
|
apache-2.0
|
slinghq/sling
|
from sling import Application
from sling.core.logger import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(
ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
Add example for werkzeug middleware installation.
|
from sling import Application
from sling.core.logger import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
# Install werkzeug debugger
from werkzeug.debug import DebuggedApplication
app.add_wsgi_middleware(DebuggedApplication, evalex=True)
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
|
<commit_before>from sling import Application
from sling.core.logger import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(
ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
<commit_msg>Add example for werkzeug middleware installation.<commit_after>
|
from sling import Application
from sling.core.logger import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
# Install werkzeug debugger
from werkzeug.debug import DebuggedApplication
app.add_wsgi_middleware(DebuggedApplication, evalex=True)
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
|
from sling import Application
from sling.core.logger import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(
ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
Add example for werkzeug middleware installation.from sling import Application
from sling.core.logger import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
# Install werkzeug debugger
from werkzeug.debug import DebuggedApplication
app.add_wsgi_middleware(DebuggedApplication, evalex=True)
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
|
<commit_before>from sling import Application
from sling.core.logger import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(
ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
<commit_msg>Add example for werkzeug middleware installation.<commit_after>from sling import Application
from sling.core.logger import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
# Install werkzeug debugger
from werkzeug.debug import DebuggedApplication
app.add_wsgi_middleware(DebuggedApplication, evalex=True)
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
|
2e6ad775b7876c0fa827481211be4d000a03aa48
|
scripts/check_deployment.py
|
scripts/check_deployment.py
|
import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
|
import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_get_templates(expect, url):
response = requests.get(f"{url}/templates")
expect(response.status_code) == 200
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_samples(expect, url):
response = requests.get(f"{url}/samples")
expect(response.status_code) == 200
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
|
Check slower endpoints during promotion
|
Check slower endpoints during promotion
|
Python
|
mit
|
jacebrowning/memegen,jacebrowning/memegen
|
import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
Check slower endpoints during promotion
|
import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_get_templates(expect, url):
response = requests.get(f"{url}/templates")
expect(response.status_code) == 200
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_samples(expect, url):
response = requests.get(f"{url}/samples")
expect(response.status_code) == 200
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
|
<commit_before>import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
<commit_msg>Check slower endpoints during promotion<commit_after>
|
import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_get_templates(expect, url):
response = requests.get(f"{url}/templates")
expect(response.status_code) == 200
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_samples(expect, url):
response = requests.get(f"{url}/samples")
expect(response.status_code) == 200
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
|
import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
Check slower endpoints during promotionimport os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_get_templates(expect, url):
response = requests.get(f"{url}/templates")
expect(response.status_code) == 200
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_samples(expect, url):
response = requests.get(f"{url}/samples")
expect(response.status_code) == 200
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
|
<commit_before>import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
<commit_msg>Check slower endpoints during promotion<commit_after>import os
import pytest
import requests
@pytest.fixture
def url():
return os.getenv("SITE", "http://localhost:5000")
def test_get_templates(expect, url):
response = requests.get(f"{url}/templates")
expect(response.status_code) == 200
def test_post_images(expect, url):
params = {"template_key": "iw", "text_lines": ["test", "deployment"]}
response = requests.post(f"{url}/images", json=params)
expect(response.status_code) == 201
expect(response.json()["url"]).endswith("/images/iw/test/deployment.png")
def test_get_samples(expect, url):
response = requests.get(f"{url}/samples")
expect(response.status_code) == 200
def test_get_image(expect, url):
response = requests.get(f"{url}/images/iw/tests_code/in_production.jpg")
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/jpeg"
def test_get_image_custom(expect, url):
response = requests.get(
f"{url}/images/custom/test.png"
"?alt=https://www.gstatic.com/webp/gallery/1.jpg"
)
expect(response.status_code) == 200
expect(response.headers["Content-Type"]) == "image/png"
|
5d35b947719106b7af67e4da92eaa4db0e5a6948
|
doc/examples/plot_tutorial_example.py
|
doc/examples/plot_tutorial_example.py
|
#!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Fake out
"""
plt.show()
|
#!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
def dummy():
"""Dummy docstring"""
pass
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Triple-quoted string which tries to break parser.
"""
plt.show()
|
Make plot2rst example more strict.
|
DOC: Make plot2rst example more strict.
|
Python
|
bsd-3-clause
|
matteoicardi/mpltools,tonysyu/mpltools
|
#!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Fake out
"""
plt.show()
DOC: Make plot2rst example more strict.
|
#!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
def dummy():
"""Dummy docstring"""
pass
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Triple-quoted string which tries to break parser.
"""
plt.show()
|
<commit_before>#!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Fake out
"""
plt.show()
<commit_msg>DOC: Make plot2rst example more strict.<commit_after>
|
#!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
def dummy():
"""Dummy docstring"""
pass
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Triple-quoted string which tries to break parser.
"""
plt.show()
|
#!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Fake out
"""
plt.show()
DOC: Make plot2rst example more strict.#!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
def dummy():
"""Dummy docstring"""
pass
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Triple-quoted string which tries to break parser.
"""
plt.show()
|
<commit_before>#!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Fake out
"""
plt.show()
<commit_msg>DOC: Make plot2rst example more strict.<commit_after>#!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
def dummy():
"""Dummy docstring"""
pass
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Triple-quoted string which tries to break parser.
"""
plt.show()
|
a3422f79367838ac30ddceaadf26dbd045381f20
|
framework/archiver/settings.py
|
framework/archiver/settings.py
|
import math
from datetime import timedelta
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = math.pow(1024, 3) # 1 GB
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
|
from datetime import timedelta
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = 1024 ** 3 # == math.pow(1024, 3) == 1 GB
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
|
Use ** instead of math.pow
|
Use ** instead of math.pow
|
Python
|
apache-2.0
|
zamattiac/osf.io,leb2dg/osf.io,monikagrabowska/osf.io,bdyetton/prettychart,cslzchen/osf.io,lyndsysimon/osf.io,TomHeatwole/osf.io,cwisecarver/osf.io,samchrisinger/osf.io,chrisseto/osf.io,jmcarp/osf.io,Johnetordoff/osf.io,RomanZWang/osf.io,mattclark/osf.io,mluo613/osf.io,ZobairAlijan/osf.io,acshi/osf.io,danielneis/osf.io,dplorimer/osf,zachjanicki/osf.io,samanehsan/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,wearpants/osf.io,ckc6cz/osf.io,caseyrollins/osf.io,arpitar/osf.io,haoyuchen1992/osf.io,crcresearch/osf.io,MerlinZhang/osf.io,ZobairAlijan/osf.io,jmcarp/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,RomanZWang/osf.io,aaxelb/osf.io,sbt9uc/osf.io,cosenal/osf.io,cldershem/osf.io,TomHeatwole/osf.io,felliott/osf.io,abought/osf.io,sloria/osf.io,zachjanicki/osf.io,acshi/osf.io,bdyetton/prettychart,crcresearch/osf.io,kch8qx/osf.io,CenterForOpenScience/osf.io,jinluyuan/osf.io,GageGaskins/osf.io,jmcarp/osf.io,ticklemepierce/osf.io,arpitar/osf.io,adlius/osf.io,fabianvf/osf.io,binoculars/osf.io,jmcarp/osf.io,dplorimer/osf,haoyuchen1992/osf.io,sloria/osf.io,sbt9uc/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,saradbowman/osf.io,petermalcolm/osf.io,DanielSBrown/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,abought/osf.io,KAsante95/osf.io,icereval/osf.io,arpitar/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,RomanZWang/osf.io,adlius/osf.io,haoyuchen1992/osf.io,caneruguz/osf.io,SSJohns/osf.io,rdhyee/osf.io,bdyetton/prettychart,jeffreyliu3230/osf.io,cslzchen/osf.io,lyndsysimon/osf.io,njantrania/osf.io,kch8qx/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,chrisseto/osf.io,aaxelb/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,caseyrollins/osf.io,reinaH/osf.io,asanfilippo7/osf.io,hmoco/osf.io,brianjgeiger/osf.io,doublebits/osf.io,felliott/osf.io,doublebits/osf.io,MerlinZhang/osf.io,Ghalko/osf.io,dplorimer/osf,bdyetton/prettychart,laurenrevere/osf.io,monikagrabowska/osf.io,icereval/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,kch8qx/osf.io,petermalcolm/osf.io,ac
shi/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,leb2dg/osf.io,jeffreyliu3230/osf.io,sloria/osf.io,aaxelb/osf.io,caseyrygt/osf.io,petermalcolm/osf.io,mluo613/osf.io,laurenrevere/osf.io,emetsger/osf.io,amyshi188/osf.io,cosenal/osf.io,mluke93/osf.io,SSJohns/osf.io,adlius/osf.io,samanehsan/osf.io,Johnetordoff/osf.io,njantrania/osf.io,jolene-esposito/osf.io,cslzchen/osf.io,hmoco/osf.io,MerlinZhang/osf.io,caseyrygt/osf.io,caseyrygt/osf.io,Nesiehr/osf.io,caneruguz/osf.io,samanehsan/osf.io,danielneis/osf.io,ckc6cz/osf.io,abought/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,MerlinZhang/osf.io,sbt9uc/osf.io,njantrania/osf.io,cwisecarver/osf.io,emetsger/osf.io,chrisseto/osf.io,Nesiehr/osf.io,dplorimer/osf,jolene-esposito/osf.io,pattisdr/osf.io,mluo613/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,cldershem/osf.io,fabianvf/osf.io,amyshi188/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,jinluyuan/osf.io,caneruguz/osf.io,doublebits/osf.io,DanielSBrown/osf.io,mfraezz/osf.io,ticklemepierce/osf.io,reinaH/osf.io,jinluyuan/osf.io,DanielSBrown/osf.io,mluke93/osf.io,arpitar/osf.io,erinspace/osf.io,HarryRybacki/osf.io,baylee-d/osf.io,ZobairAlijan/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,brianjgeiger/osf.io,mluo613/osf.io,adlius/osf.io,caseyrygt/osf.io,GageGaskins/osf.io,HalcyonChimera/osf.io,SSJohns/osf.io,Nesiehr/osf.io,alexschiller/osf.io,icereval/osf.io,rdhyee/osf.io,erinspace/osf.io,chrisseto/osf.io,Ghalko/osf.io,hmoco/osf.io,acshi/osf.io,abought/osf.io,jinluyuan/osf.io,zachjanicki/osf.io,mluke93/osf.io,brandonPurvis/osf.io,zamattiac/osf.io,reinaH/osf.io,jeffreyliu3230/osf.io,amyshi188/osf.io,jolene-esposito/osf.io,sbt9uc/osf.io,samchrisinger/osf.io,acshi/osf.io,cldershem/osf.io,emetsger/osf.io,zamattiac/osf.io,ckc6cz/osf.io,asanfilippo7/osf.io,wearpants/osf.io,monikagrabowska/osf.io,KAsante95/osf.io,HarryRybacki/osf.io,Nesiehr/osf.io,billyhunt/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,fabianvf/osf.io,njantrania/osf.io,amyshi188/osf.io,petermalcolm/osf.io,crcresearch/osf.
io,jnayak1/osf.io,rdhyee/osf.io,billyhunt/osf.io,cosenal/osf.io,chennan47/osf.io,haoyuchen1992/osf.io,Johnetordoff/osf.io,mluke93/osf.io,leb2dg/osf.io,pattisdr/osf.io,wearpants/osf.io,HarryRybacki/osf.io,billyhunt/osf.io,mluo613/osf.io,asanfilippo7/osf.io,cwisecarver/osf.io,kwierman/osf.io,hmoco/osf.io,brianjgeiger/osf.io,cldershem/osf.io,CenterForOpenScience/osf.io,alexschiller/osf.io,kwierman/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,KAsante95/osf.io,KAsante95/osf.io,monikagrabowska/osf.io,erinspace/osf.io,pattisdr/osf.io,chennan47/osf.io,saradbowman/osf.io,doublebits/osf.io,billyhunt/osf.io,SSJohns/osf.io,aaxelb/osf.io,lyndsysimon/osf.io,jnayak1/osf.io,jolene-esposito/osf.io,Ghalko/osf.io,mfraezz/osf.io,alexschiller/osf.io,mattclark/osf.io,laurenrevere/osf.io,kwierman/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,TomBaxter/osf.io,kch8qx/osf.io,fabianvf/osf.io,reinaH/osf.io,doublebits/osf.io,HarryRybacki/osf.io,cosenal/osf.io,caseyrollins/osf.io,ticklemepierce/osf.io,lyndsysimon/osf.io,zamattiac/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,felliott/osf.io,TomBaxter/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,danielneis/osf.io,ckc6cz/osf.io,binoculars/osf.io,binoculars/osf.io,samanehsan/osf.io,mattclark/osf.io,Ghalko/osf.io,chennan47/osf.io,alexschiller/osf.io,danielneis/osf.io,baylee-d/osf.io,jnayak1/osf.io,jeffreyliu3230/osf.io,caneruguz/osf.io,samchrisinger/osf.io,cslzchen/osf.io,mfraezz/osf.io,baylee-d/osf.io
|
import math
from datetime import timedelta
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = math.pow(1024, 3) # 1 GB
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
Use ** instead of math.pow
|
from datetime import timedelta
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = 1024 ** 3 # == math.pow(1024, 3) == 1 GB
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
|
<commit_before>import math
from datetime import timedelta
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = math.pow(1024, 3) # 1 GB
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
<commit_msg>Use ** instead of math.pow<commit_after>
|
from datetime import timedelta
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = 1024 ** 3 # == math.pow(1024, 3) == 1 GB
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
|
import math
from datetime import timedelta
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = math.pow(1024, 3) # 1 GB
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
Use ** instead of math.powfrom datetime import timedelta
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = 1024 ** 3 # == math.pow(1024, 3) == 1 GB
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
|
<commit_before>import math
from datetime import timedelta
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = math.pow(1024, 3) # 1 GB
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
<commit_msg>Use ** instead of math.pow<commit_after>from datetime import timedelta
ARCHIVE_PROVIDER = 'osfstorage'
MAX_ARCHIVE_SIZE = 1024 ** 3 # == math.pow(1024, 3) == 1 GB
MAX_FILE_SIZE = MAX_ARCHIVE_SIZE # TODO limit file size?
ARCHIVE_TIMEOUT_TIMEDELTA = timedelta(1) # 24 hours
|
61595ed284a132c3c32975a6bfe7642c51c45c7b
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
# 177:24 warning hexidecimal color should be a variable colors
# 177 warning hexidecimal color should be a variable colors
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
Add some more output examples
|
Add some more output examples
|
Python
|
mit
|
jackbrewer/SublimeLinter-contrib-stylint
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
Add some more output examples
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
# 177:24 warning hexidecimal color should be a variable colors
# 177 warning hexidecimal color should be a variable colors
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
<commit_msg>Add some more output examples<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
# 177:24 warning hexidecimal color should be a variable colors
# 177 warning hexidecimal color should be a variable colors
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
Add some more output examples#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
# 177:24 warning hexidecimal color should be a variable colors
# 177 warning hexidecimal color should be a variable colors
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
<commit_msg>Add some more output examples<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""Exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = ('stylus', 'vue')
selectors = {'vue': 'source.stylus.embedded.html'}
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
# 177:24 warning hexidecimal color should be a variable colors
# 177 warning hexidecimal color should be a variable colors
^(?P<line>\d+):?(?P<col>\d+)?\s*(?P<rule>\w+)?\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
8b037b975869b4ad7361aa4690a52a65915550b8
|
writer.py
|
writer.py
|
from graph import Graph
class Writer:
'''
Write a graph into file.
'''
def write_blossom_iv(self, graph, file_location):
'''
Write a graph to a file, use the blossom IV format
@type: graph: graph
@param: graph: graph that should be written to file
@type: file_location: string
@param: string that contains the file location
@rtype: boolean
@return: True, if the file was written successfully and False if
someting went wrong
'''
f = open(file_location, 'w')
# write number of nodes and edges
print('{0} {1}'.format(graph.size, graph.edge_count), file=f)
# write and edge on every line
# ID node_1 node_2 weight
#TODO: Use a more generic solution, do not just print odd_node_nr
for node in graph.nodes:
for neighbour in graph.neighbour_nodes(node):
edge_list = graph.edge_by_nodes(node, neighbour)
for edge in edge_list:
print('{0} {1} {2}' \
.format(node.odd_node_nr, neighbour.odd_node_nr,
edge.weight), \
file=f)
|
from graph import Graph
class Writer:
'''
Write a graph or a list of nodes into a file.
'''
def write_blossom_iv(self, graph, file_location):
'''
Write a graph to a file, use the blossom IV format
@type: graph: graph
@param: graph: graph that should be written to file
@type: file_location: string
@param: location to save the file
'''
f = open(file_location, 'w')
# write number of nodes and edges
print('{0} {1}'.format(graph.size, graph.edge_count), file=f)
# write and edge on every line
# ID node_1 node_2 weight
#TODO: Use a more generic solution, do not just print odd_node_nr
for node in graph.nodes:
for neighbour in graph.neighbour_nodes(node):
edge_list = graph.edge_by_nodes(node, neighbour)
for edge in edge_list:
print('{0} {1} {2}' \
.format(node.odd_node_nr, neighbour.odd_node_nr,
edge.weight), \
file=f)
f.close()
def write_nodes(self, nodes, file_location):
'''
Writes a list of nodes into a file with their x and y coordinates
@type nodes: list
@param: nodes: a list of nodes.
@type file_location: string
@param: location to save the file
'''
f = open(file_location, 'w')
for node in nodes:
print('{0} {1} {2}'.format(node.label, node.x, node.y), file=f)
f.close()
|
Add a function to write a sequence of nodes into a file
|
Add a function to write a sequence of nodes into a file
|
Python
|
isc
|
exul/semesterarbeit
|
from graph import Graph
class Writer:
'''
Write a graph into file.
'''
def write_blossom_iv(self, graph, file_location):
'''
Write a graph to a file, use the blossom IV format
@type: graph: graph
@param: graph: graph that should be written to file
@type: file_location: string
@param: string that contains the file location
@rtype: boolean
@return: True, if the file was written successfully and False if
someting went wrong
'''
f = open(file_location, 'w')
# write number of nodes and edges
print('{0} {1}'.format(graph.size, graph.edge_count), file=f)
# write and edge on every line
# ID node_1 node_2 weight
#TODO: Use a more generic solution, do not just print odd_node_nr
for node in graph.nodes:
for neighbour in graph.neighbour_nodes(node):
edge_list = graph.edge_by_nodes(node, neighbour)
for edge in edge_list:
print('{0} {1} {2}' \
.format(node.odd_node_nr, neighbour.odd_node_nr,
edge.weight), \
file=f)
Add a function to write a sequence of nodes into a file
|
from graph import Graph
class Writer:
'''
Write a graph or a list of nodes into a file.
'''
def write_blossom_iv(self, graph, file_location):
'''
Write a graph to a file, use the blossom IV format
@type: graph: graph
@param: graph: graph that should be written to file
@type: file_location: string
@param: location to save the file
'''
f = open(file_location, 'w')
# write number of nodes and edges
print('{0} {1}'.format(graph.size, graph.edge_count), file=f)
# write and edge on every line
# ID node_1 node_2 weight
#TODO: Use a more generic solution, do not just print odd_node_nr
for node in graph.nodes:
for neighbour in graph.neighbour_nodes(node):
edge_list = graph.edge_by_nodes(node, neighbour)
for edge in edge_list:
print('{0} {1} {2}' \
.format(node.odd_node_nr, neighbour.odd_node_nr,
edge.weight), \
file=f)
f.close()
def write_nodes(self, nodes, file_location):
'''
Writes a list of nodes into a file with their x and y coordinates
@type nodes: list
@param: nodes: a list of nodes.
@type file_location: string
@param: location to save the file
'''
f = open(file_location, 'w')
for node in nodes:
print('{0} {1} {2}'.format(node.label, node.x, node.y), file=f)
f.close()
|
<commit_before>from graph import Graph
class Writer:
'''
Write a graph into file.
'''
def write_blossom_iv(self, graph, file_location):
'''
Write a graph to a file, use the blossom IV format
@type: graph: graph
@param: graph: graph that should be written to file
@type: file_location: string
@param: string that contains the file location
@rtype: boolean
@return: True, if the file was written successfully and False if
someting went wrong
'''
f = open(file_location, 'w')
# write number of nodes and edges
print('{0} {1}'.format(graph.size, graph.edge_count), file=f)
# write and edge on every line
# ID node_1 node_2 weight
#TODO: Use a more generic solution, do not just print odd_node_nr
for node in graph.nodes:
for neighbour in graph.neighbour_nodes(node):
edge_list = graph.edge_by_nodes(node, neighbour)
for edge in edge_list:
print('{0} {1} {2}' \
.format(node.odd_node_nr, neighbour.odd_node_nr,
edge.weight), \
file=f)
<commit_msg>Add a function to write a sequence of nodes into a file<commit_after>
|
from graph import Graph
class Writer:
'''
Write a graph or a list of nodes into a file.
'''
def write_blossom_iv(self, graph, file_location):
'''
Write a graph to a file, use the blossom IV format
@type: graph: graph
@param: graph: graph that should be written to file
@type: file_location: string
@param: location to save the file
'''
f = open(file_location, 'w')
# write number of nodes and edges
print('{0} {1}'.format(graph.size, graph.edge_count), file=f)
# write and edge on every line
# ID node_1 node_2 weight
#TODO: Use a more generic solution, do not just print odd_node_nr
for node in graph.nodes:
for neighbour in graph.neighbour_nodes(node):
edge_list = graph.edge_by_nodes(node, neighbour)
for edge in edge_list:
print('{0} {1} {2}' \
.format(node.odd_node_nr, neighbour.odd_node_nr,
edge.weight), \
file=f)
f.close()
def write_nodes(self, nodes, file_location):
'''
Writes a list of nodes into a file with their x and y coordinates
@type nodes: list
@param: nodes: a list of nodes.
@type file_location: string
@param: location to save the file
'''
f = open(file_location, 'w')
for node in nodes:
print('{0} {1} {2}'.format(node.label, node.x, node.y), file=f)
f.close()
|
from graph import Graph
class Writer:
'''
Write a graph into file.
'''
def write_blossom_iv(self, graph, file_location):
'''
Write a graph to a file, use the blossom IV format
@type: graph: graph
@param: graph: graph that should be written to file
@type: file_location: string
@param: string that contains the file location
@rtype: boolean
@return: True, if the file was written successfully and False if
someting went wrong
'''
f = open(file_location, 'w')
# write number of nodes and edges
print('{0} {1}'.format(graph.size, graph.edge_count), file=f)
# write and edge on every line
# ID node_1 node_2 weight
#TODO: Use a more generic solution, do not just print odd_node_nr
for node in graph.nodes:
for neighbour in graph.neighbour_nodes(node):
edge_list = graph.edge_by_nodes(node, neighbour)
for edge in edge_list:
print('{0} {1} {2}' \
.format(node.odd_node_nr, neighbour.odd_node_nr,
edge.weight), \
file=f)
Add a function to write a sequence of nodes into a filefrom graph import Graph
class Writer:
'''
Write a graph or a list of nodes into a file.
'''
def write_blossom_iv(self, graph, file_location):
'''
Write a graph to a file, use the blossom IV format
@type: graph: graph
@param: graph: graph that should be written to file
@type: file_location: string
@param: location to save the file
'''
f = open(file_location, 'w')
# write number of nodes and edges
print('{0} {1}'.format(graph.size, graph.edge_count), file=f)
# write and edge on every line
# ID node_1 node_2 weight
#TODO: Use a more generic solution, do not just print odd_node_nr
for node in graph.nodes:
for neighbour in graph.neighbour_nodes(node):
edge_list = graph.edge_by_nodes(node, neighbour)
for edge in edge_list:
print('{0} {1} {2}' \
.format(node.odd_node_nr, neighbour.odd_node_nr,
edge.weight), \
file=f)
f.close()
def write_nodes(self, nodes, file_location):
'''
Writes a list of nodes into a file with their x and y coordinates
@type nodes: list
@param: nodes: a list of nodes.
@type file_location: string
@param: location to save the file
'''
f = open(file_location, 'w')
for node in nodes:
print('{0} {1} {2}'.format(node.label, node.x, node.y), file=f)
f.close()
|
<commit_before>from graph import Graph
class Writer:
'''
Write a graph into file.
'''
def write_blossom_iv(self, graph, file_location):
'''
Write a graph to a file, use the blossom IV format
@type: graph: graph
@param: graph: graph that should be written to file
@type: file_location: string
@param: string that contains the file location
@rtype: boolean
@return: True, if the file was written successfully and False if
someting went wrong
'''
f = open(file_location, 'w')
# write number of nodes and edges
print('{0} {1}'.format(graph.size, graph.edge_count), file=f)
# write and edge on every line
# ID node_1 node_2 weight
#TODO: Use a more generic solution, do not just print odd_node_nr
for node in graph.nodes:
for neighbour in graph.neighbour_nodes(node):
edge_list = graph.edge_by_nodes(node, neighbour)
for edge in edge_list:
print('{0} {1} {2}' \
.format(node.odd_node_nr, neighbour.odd_node_nr,
edge.weight), \
file=f)
<commit_msg>Add a function to write a sequence of nodes into a file<commit_after>from graph import Graph
class Writer:
'''
Write a graph or a list of nodes into a file.
'''
def write_blossom_iv(self, graph, file_location):
'''
Write a graph to a file, use the blossom IV format
@type: graph: graph
@param: graph: graph that should be written to file
@type: file_location: string
@param: location to save the file
'''
f = open(file_location, 'w')
# write number of nodes and edges
print('{0} {1}'.format(graph.size, graph.edge_count), file=f)
# write and edge on every line
# ID node_1 node_2 weight
#TODO: Use a more generic solution, do not just print odd_node_nr
for node in graph.nodes:
for neighbour in graph.neighbour_nodes(node):
edge_list = graph.edge_by_nodes(node, neighbour)
for edge in edge_list:
print('{0} {1} {2}' \
.format(node.odd_node_nr, neighbour.odd_node_nr,
edge.weight), \
file=f)
f.close()
def write_nodes(self, nodes, file_location):
'''
Writes a list of nodes into a file with their x and y coordinates
@type nodes: list
@param: nodes: a list of nodes.
@type file_location: string
@param: location to save the file
'''
f = open(file_location, 'w')
for node in nodes:
print('{0} {1} {2}'.format(node.label, node.x, node.y), file=f)
f.close()
|
352c59a8f6ea12cef30b84c5778bc8991c53af4b
|
models.py
|
models.py
|
import mongoengine as me
from datetime import datetime
from config import DATABASE_NAME
# Connect to the Mongo database
me.connect(DATABASE_NAME)
class Page(me.Document):
"""Represents a page."""
title = me.StringField(required=True)
author = me.StringField(required=True)
body = me.StringField()
created_at = me.DateTimeField(default=datetime.now)
class Profile(me.Document):
"""Represents a Github user profile."""
raw_json = me.DynamicField()
downloaded_at = me.DateTimeField(default=datetime.now)
|
import mongoengine as me
from datetime import datetime
from config import DATABASE_NAME
# Connect to the Mongo database
me.connect(DATABASE_NAME)
class Page(me.Document):
"""Represents a page.
This is an example of defining a fixed schema with validation.
"""
title = me.StringField(required=True)
author = me.StringField(required=True)
body = me.StringField(required=True)
created_at = me.DateTimeField(default=datetime.now)
class Profile(me.Document):
"""Represents a Github user profile.
This is an example of saving raw JSON data into Mongo with no constraints.
"""
raw_json = me.DynamicField()
downloaded_at = me.DateTimeField(default=datetime.now)
|
Add more validation. Add to docstrings.
|
Add more validation. Add to docstrings.
|
Python
|
mit
|
CodeSelfStudy/flask_mongoengine_example,CodeSelfStudy/flask_mongoengine_example
|
import mongoengine as me
from datetime import datetime
from config import DATABASE_NAME
# Connect to the Mongo database
me.connect(DATABASE_NAME)
class Page(me.Document):
"""Represents a page."""
title = me.StringField(required=True)
author = me.StringField(required=True)
body = me.StringField()
created_at = me.DateTimeField(default=datetime.now)
class Profile(me.Document):
"""Represents a Github user profile."""
raw_json = me.DynamicField()
downloaded_at = me.DateTimeField(default=datetime.now)
Add more validation. Add to docstrings.
|
import mongoengine as me
from datetime import datetime
from config import DATABASE_NAME
# Connect to the Mongo database
me.connect(DATABASE_NAME)
class Page(me.Document):
"""Represents a page.
This is an example of defining a fixed schema with validation.
"""
title = me.StringField(required=True)
author = me.StringField(required=True)
body = me.StringField(required=True)
created_at = me.DateTimeField(default=datetime.now)
class Profile(me.Document):
"""Represents a Github user profile.
This is an example of saving raw JSON data into Mongo with no constraints.
"""
raw_json = me.DynamicField()
downloaded_at = me.DateTimeField(default=datetime.now)
|
<commit_before>import mongoengine as me
from datetime import datetime
from config import DATABASE_NAME
# Connect to the Mongo database
me.connect(DATABASE_NAME)
class Page(me.Document):
"""Represents a page."""
title = me.StringField(required=True)
author = me.StringField(required=True)
body = me.StringField()
created_at = me.DateTimeField(default=datetime.now)
class Profile(me.Document):
"""Represents a Github user profile."""
raw_json = me.DynamicField()
downloaded_at = me.DateTimeField(default=datetime.now)
<commit_msg>Add more validation. Add to docstrings.<commit_after>
|
import mongoengine as me
from datetime import datetime
from config import DATABASE_NAME
# Connect to the Mongo database
me.connect(DATABASE_NAME)
class Page(me.Document):
"""Represents a page.
This is an example of defining a fixed schema with validation.
"""
title = me.StringField(required=True)
author = me.StringField(required=True)
body = me.StringField(required=True)
created_at = me.DateTimeField(default=datetime.now)
class Profile(me.Document):
"""Represents a Github user profile.
This is an example of saving raw JSON data into Mongo with no constraints.
"""
raw_json = me.DynamicField()
downloaded_at = me.DateTimeField(default=datetime.now)
|
import mongoengine as me
from datetime import datetime
from config import DATABASE_NAME
# Connect to the Mongo database
me.connect(DATABASE_NAME)
class Page(me.Document):
"""Represents a page."""
title = me.StringField(required=True)
author = me.StringField(required=True)
body = me.StringField()
created_at = me.DateTimeField(default=datetime.now)
class Profile(me.Document):
"""Represents a Github user profile."""
raw_json = me.DynamicField()
downloaded_at = me.DateTimeField(default=datetime.now)
Add more validation. Add to docstrings.import mongoengine as me
from datetime import datetime
from config import DATABASE_NAME
# Connect to the Mongo database
me.connect(DATABASE_NAME)
class Page(me.Document):
"""Represents a page.
This is an example of defining a fixed schema with validation.
"""
title = me.StringField(required=True)
author = me.StringField(required=True)
body = me.StringField(required=True)
created_at = me.DateTimeField(default=datetime.now)
class Profile(me.Document):
"""Represents a Github user profile.
This is an example of saving raw JSON data into Mongo with no constraints.
"""
raw_json = me.DynamicField()
downloaded_at = me.DateTimeField(default=datetime.now)
|
<commit_before>import mongoengine as me
from datetime import datetime
from config import DATABASE_NAME
# Connect to the Mongo database
me.connect(DATABASE_NAME)
class Page(me.Document):
"""Represents a page."""
title = me.StringField(required=True)
author = me.StringField(required=True)
body = me.StringField()
created_at = me.DateTimeField(default=datetime.now)
class Profile(me.Document):
"""Represents a Github user profile."""
raw_json = me.DynamicField()
downloaded_at = me.DateTimeField(default=datetime.now)
<commit_msg>Add more validation. Add to docstrings.<commit_after>import mongoengine as me
from datetime import datetime
from config import DATABASE_NAME
# Connect to the Mongo database
me.connect(DATABASE_NAME)
class Page(me.Document):
"""Represents a page.
This is an example of defining a fixed schema with validation.
"""
title = me.StringField(required=True)
author = me.StringField(required=True)
body = me.StringField(required=True)
created_at = me.DateTimeField(default=datetime.now)
class Profile(me.Document):
"""Represents a Github user profile.
This is an example of saving raw JSON data into Mongo with no constraints.
"""
raw_json = me.DynamicField()
downloaded_at = me.DateTimeField(default=datetime.now)
|
42fd4ef247606155a9282001c9df1c6401a9fcb4
|
autocomplete_light/__init__.py
|
autocomplete_light/__init__.py
|
"""
Provide tools to enable nice autocompletes in your Django project.
"""
from django import VERSION
if VERSION < (1, 7):
from .shortcuts import * # noqa
else:
from .views import * # noqa
from .forms import * # noqa
default_app_config = 'autocomplete_light.apps.AutocompleteLightConfig'
|
"""
Provide tools to enable nice autocompletes in your Django project.
"""
from django import VERSION
if VERSION < (1, 9):
from .shortcuts import * # noqa
else:
from .views import * # noqa
from .forms import * # noqa
default_app_config = 'autocomplete_light.apps.AutocompleteLightConfig'
|
Maintain BC again in autocomplete_light ns until dj19
|
Maintain BC again in autocomplete_light ns until dj19
|
Python
|
mit
|
Perkville/django-autocomplete-light,Eraldo/django-autocomplete-light,dsanders11/django-autocomplete-light,shubhamdipt/django-autocomplete-light,dsanders11/django-autocomplete-light,yourlabs/django-autocomplete-light,Visgean/django-autocomplete-light,shubhamdipt/django-autocomplete-light,dsanders11/django-autocomplete-light,shubhamdipt/django-autocomplete-light,Visgean/django-autocomplete-light,luzfcb/django-autocomplete-light,Perkville/django-autocomplete-light,Eraldo/django-autocomplete-light,Visgean/django-autocomplete-light,yourlabs/django-autocomplete-light,yourlabs/django-autocomplete-light,dsanders11/django-autocomplete-light,Perkville/django-autocomplete-light,Visgean/django-autocomplete-light,Eraldo/django-autocomplete-light,Eraldo/django-autocomplete-light,shubhamdipt/django-autocomplete-light,luzfcb/django-autocomplete-light,yourlabs/django-autocomplete-light,Perkville/django-autocomplete-light,luzfcb/django-autocomplete-light,luzfcb/django-autocomplete-light
|
"""
Provide tools to enable nice autocompletes in your Django project.
"""
from django import VERSION
if VERSION < (1, 7):
from .shortcuts import * # noqa
else:
from .views import * # noqa
from .forms import * # noqa
default_app_config = 'autocomplete_light.apps.AutocompleteLightConfig'
Maintain BC again in autocomplete_light ns until dj19
|
"""
Provide tools to enable nice autocompletes in your Django project.
"""
from django import VERSION
if VERSION < (1, 9):
from .shortcuts import * # noqa
else:
from .views import * # noqa
from .forms import * # noqa
default_app_config = 'autocomplete_light.apps.AutocompleteLightConfig'
|
<commit_before>"""
Provide tools to enable nice autocompletes in your Django project.
"""
from django import VERSION
if VERSION < (1, 7):
from .shortcuts import * # noqa
else:
from .views import * # noqa
from .forms import * # noqa
default_app_config = 'autocomplete_light.apps.AutocompleteLightConfig'
<commit_msg>Maintain BC again in autocomplete_light ns until dj19<commit_after>
|
"""
Provide tools to enable nice autocompletes in your Django project.
"""
from django import VERSION
if VERSION < (1, 9):
from .shortcuts import * # noqa
else:
from .views import * # noqa
from .forms import * # noqa
default_app_config = 'autocomplete_light.apps.AutocompleteLightConfig'
|
"""
Provide tools to enable nice autocompletes in your Django project.
"""
from django import VERSION
if VERSION < (1, 7):
from .shortcuts import * # noqa
else:
from .views import * # noqa
from .forms import * # noqa
default_app_config = 'autocomplete_light.apps.AutocompleteLightConfig'
Maintain BC again in autocomplete_light ns until dj19"""
Provide tools to enable nice autocompletes in your Django project.
"""
from django import VERSION
if VERSION < (1, 9):
from .shortcuts import * # noqa
else:
from .views import * # noqa
from .forms import * # noqa
default_app_config = 'autocomplete_light.apps.AutocompleteLightConfig'
|
<commit_before>"""
Provide tools to enable nice autocompletes in your Django project.
"""
from django import VERSION
if VERSION < (1, 7):
from .shortcuts import * # noqa
else:
from .views import * # noqa
from .forms import * # noqa
default_app_config = 'autocomplete_light.apps.AutocompleteLightConfig'
<commit_msg>Maintain BC again in autocomplete_light ns until dj19<commit_after>"""
Provide tools to enable nice autocompletes in your Django project.
"""
from django import VERSION
if VERSION < (1, 9):
from .shortcuts import * # noqa
else:
from .views import * # noqa
from .forms import * # noqa
default_app_config = 'autocomplete_light.apps.AutocompleteLightConfig'
|
13fc59baa49d4f49d4e6ad9e0766ae2aede2fc25
|
proselint/command_line.py
|
proselint/command_line.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
if not file:
raise ValueError("Specify a file to lint using the --file flag.")
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
|
Raise an error if no file is specified
|
Raise an error if no file is specified
|
Python
|
bsd-3-clause
|
jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
Raise an error if no file is specified
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
if not file:
raise ValueError("Specify a file to lint using the --file flag.")
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
<commit_msg>Raise an error if no file is specified<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
if not file:
raise ValueError("Specify a file to lint using the --file flag.")
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
Raise an error if no file is specified#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
if not file:
raise ValueError("Specify a file to lint using the --file flag.")
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
<commit_msg>Raise an error if no file is specified<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Command line utility for proselint."""
import click
import os
import imp
def log_error(line, column, error_code, msg):
"""Print a message to the command line."""
click.echo(str(line) + ":" +
str(column) + " \t" +
error_code + ": " +
msg + " " +
"http://lifelinter.com/" + error_code)
@click.command()
@click.option('--version/--whatever', default=False)
@click.argument('file', default=False)
def proselint(version, file):
"""Run the linter."""
if not file:
raise ValueError("Specify a file to lint using the --file flag.")
# Extract functions from the checks folder.
checks = []
listing = os.listdir(
os.path.join(os.path.dirname(os.path.realpath(__file__)), "checks"))
for f in listing:
if f[-3:] == ".py" and not f == "__init__.py":
m = imp.load_source("rule", os.path.join("proselint", "checks", f))
checks.append(getattr(m, 'check'))
# Return the version number.
if version:
print "v0.0.1"
# Apply all the checks.
else:
with open(file, "r") as f:
text = f.read()
for check in checks:
errors = check(text)
for error in errors:
log_error(*error)
|
cd590194c4a3d3455cf05bd922352ead2729982d
|
src/encoded/views/__init__.py
|
src/encoded/views/__init__.py
|
from pyramid.view import view_config
from random import randint
from ..contentbase import (
Root,
location_root,
)
def includeme(config):
# Random processid so etags are invalidated after restart.
config.registry['encoded.processid'] = randint(0, 2 ** 32)
config.add_route('schema', '/profiles/{item_type}.json')
config.scan()
@location_root
class EncodedRoot(Root):
properties = {
'title': 'Home',
'portal_title': 'ENCODE 3',
}
@view_config(context=Root, request_method='GET')
def home(context, request):
result = context.__json__(request)
result.update({
'@id': request.resource_path(context),
'@type': ['portal'],
# 'login': {'href': request.resource_path(context, 'login')},
})
return result
@view_config(route_name='schema', request_method='GET')
def schema(context, request):
item_type = request.matchdict['item_type']
collection = context.by_item_type[item_type]
return collection.schema
|
from pyramid.httpexceptions import HTTPNotFound
from pyramid.view import view_config
from random import randint
from ..contentbase import (
Root,
location_root,
)
def includeme(config):
    """Pyramid ``config.include`` hook: register routes and scan views."""
    # Random processid so etags are invalidated after restart.
    config.registry['encoded.processid'] = randint(0, 2 ** 32)
    # JSON schema endpoint for a collection, e.g. /profiles/foo.json.
    config.add_route('schema', '/profiles/{item_type}.json')
    config.scan()
# Registers this class as the resource-tree root (decorator defined in
# ..contentbase).
@location_root
class EncodedRoot(Root):
    """Root resource of the encoded site."""

    # Static properties merged into the root's JSON rendering.
    properties = {
        'title': 'Home',
        'portal_title': 'ENCODE 3',
    }
@view_config(context=Root, request_method='GET')
def home(context, request):
    """GET / -- render the portal home document as JSON."""
    result = context.__json__(request)
    result.update({
        '@id': request.resource_path(context),
        '@type': ['portal'],
        # 'login': {'href': request.resource_path(context, 'login')},
    })
    return result
@view_config(route_name='schema', request_method='GET')
def schema(context, request):
    """GET /profiles/{item_type}.json -- return a collection's schema.

    Responds 404 (instead of an unhandled KeyError / 500) when
    *item_type* does not name a known collection.
    """
    item_type = request.matchdict['item_type']
    try:
        collection = context.by_item_type[item_type]
    except KeyError:
        raise HTTPNotFound(item_type)
    return collection.schema
|
Raise Not Found on schema item_type error.
|
Raise not Found on schema item_type error.
|
Python
|
mit
|
hms-dbmi/fourfront,philiptzou/clincoded,4dn-dcic/fourfront,ClinGen/clincoded,ENCODE-DCC/encoded,ClinGen/clincoded,kidaa/encoded,T2DREAM/t2dream-portal,philiptzou/clincoded,hms-dbmi/fourfront,ClinGen/clincoded,philiptzou/clincoded,ENCODE-DCC/snovault,ENCODE-DCC/encoded,ClinGen/clincoded,ENCODE-DCC/encoded,ENCODE-DCC/snovault,philiptzou/clincoded,4dn-dcic/fourfront,ENCODE-DCC/snovault,ENCODE-DCC/snovault,4dn-dcic/fourfront,hms-dbmi/fourfront,kidaa/encoded,ENCODE-DCC/snovault,T2DREAM/t2dream-portal,T2DREAM/t2dream-portal,T2DREAM/t2dream-portal,kidaa/encoded,philiptzou/clincoded,ENCODE-DCC/encoded,hms-dbmi/fourfront,ClinGen/clincoded,hms-dbmi/fourfront,kidaa/encoded,kidaa/encoded,4dn-dcic/fourfront
|
from pyramid.view import view_config
from random import randint
from ..contentbase import (
Root,
location_root,
)
def includeme(config):
# Random processid so etags are invalidated after restart.
config.registry['encoded.processid'] = randint(0, 2 ** 32)
config.add_route('schema', '/profiles/{item_type}.json')
config.scan()
@location_root
class EncodedRoot(Root):
properties = {
'title': 'Home',
'portal_title': 'ENCODE 3',
}
@view_config(context=Root, request_method='GET')
def home(context, request):
result = context.__json__(request)
result.update({
'@id': request.resource_path(context),
'@type': ['portal'],
# 'login': {'href': request.resource_path(context, 'login')},
})
return result
@view_config(route_name='schema', request_method='GET')
def schema(context, request):
item_type = request.matchdict['item_type']
collection = context.by_item_type[item_type]
return collection.schema
Raise not Found on schema item_type error.
|
from pyramid.httpexceptions import HTTPNotFound
from pyramid.view import view_config
from random import randint
from ..contentbase import (
Root,
location_root,
)
def includeme(config):
# Random processid so etags are invalidated after restart.
config.registry['encoded.processid'] = randint(0, 2 ** 32)
config.add_route('schema', '/profiles/{item_type}.json')
config.scan()
@location_root
class EncodedRoot(Root):
properties = {
'title': 'Home',
'portal_title': 'ENCODE 3',
}
@view_config(context=Root, request_method='GET')
def home(context, request):
result = context.__json__(request)
result.update({
'@id': request.resource_path(context),
'@type': ['portal'],
# 'login': {'href': request.resource_path(context, 'login')},
})
return result
@view_config(route_name='schema', request_method='GET')
def schema(context, request):
item_type = request.matchdict['item_type']
try:
collection = context.by_item_type[item_type]
except KeyError:
raise HTTPNotFound(item_type)
return collection.schema
|
<commit_before>from pyramid.view import view_config
from random import randint
from ..contentbase import (
Root,
location_root,
)
def includeme(config):
# Random processid so etags are invalidated after restart.
config.registry['encoded.processid'] = randint(0, 2 ** 32)
config.add_route('schema', '/profiles/{item_type}.json')
config.scan()
@location_root
class EncodedRoot(Root):
properties = {
'title': 'Home',
'portal_title': 'ENCODE 3',
}
@view_config(context=Root, request_method='GET')
def home(context, request):
result = context.__json__(request)
result.update({
'@id': request.resource_path(context),
'@type': ['portal'],
# 'login': {'href': request.resource_path(context, 'login')},
})
return result
@view_config(route_name='schema', request_method='GET')
def schema(context, request):
item_type = request.matchdict['item_type']
collection = context.by_item_type[item_type]
return collection.schema
<commit_msg>Raise not Found on schema item_type error.<commit_after>
|
from pyramid.httpexceptions import HTTPNotFound
from pyramid.view import view_config
from random import randint
from ..contentbase import (
Root,
location_root,
)
def includeme(config):
# Random processid so etags are invalidated after restart.
config.registry['encoded.processid'] = randint(0, 2 ** 32)
config.add_route('schema', '/profiles/{item_type}.json')
config.scan()
@location_root
class EncodedRoot(Root):
properties = {
'title': 'Home',
'portal_title': 'ENCODE 3',
}
@view_config(context=Root, request_method='GET')
def home(context, request):
result = context.__json__(request)
result.update({
'@id': request.resource_path(context),
'@type': ['portal'],
# 'login': {'href': request.resource_path(context, 'login')},
})
return result
@view_config(route_name='schema', request_method='GET')
def schema(context, request):
item_type = request.matchdict['item_type']
try:
collection = context.by_item_type[item_type]
except KeyError:
raise HTTPNotFound(item_type)
return collection.schema
|
from pyramid.view import view_config
from random import randint
from ..contentbase import (
Root,
location_root,
)
def includeme(config):
# Random processid so etags are invalidated after restart.
config.registry['encoded.processid'] = randint(0, 2 ** 32)
config.add_route('schema', '/profiles/{item_type}.json')
config.scan()
@location_root
class EncodedRoot(Root):
properties = {
'title': 'Home',
'portal_title': 'ENCODE 3',
}
@view_config(context=Root, request_method='GET')
def home(context, request):
result = context.__json__(request)
result.update({
'@id': request.resource_path(context),
'@type': ['portal'],
# 'login': {'href': request.resource_path(context, 'login')},
})
return result
@view_config(route_name='schema', request_method='GET')
def schema(context, request):
item_type = request.matchdict['item_type']
collection = context.by_item_type[item_type]
return collection.schema
Raise not Found on schema item_type error.from pyramid.httpexceptions import HTTPNotFound
from pyramid.view import view_config
from random import randint
from ..contentbase import (
Root,
location_root,
)
def includeme(config):
# Random processid so etags are invalidated after restart.
config.registry['encoded.processid'] = randint(0, 2 ** 32)
config.add_route('schema', '/profiles/{item_type}.json')
config.scan()
@location_root
class EncodedRoot(Root):
properties = {
'title': 'Home',
'portal_title': 'ENCODE 3',
}
@view_config(context=Root, request_method='GET')
def home(context, request):
result = context.__json__(request)
result.update({
'@id': request.resource_path(context),
'@type': ['portal'],
# 'login': {'href': request.resource_path(context, 'login')},
})
return result
@view_config(route_name='schema', request_method='GET')
def schema(context, request):
item_type = request.matchdict['item_type']
try:
collection = context.by_item_type[item_type]
except KeyError:
raise HTTPNotFound(item_type)
return collection.schema
|
<commit_before>from pyramid.view import view_config
from random import randint
from ..contentbase import (
Root,
location_root,
)
def includeme(config):
# Random processid so etags are invalidated after restart.
config.registry['encoded.processid'] = randint(0, 2 ** 32)
config.add_route('schema', '/profiles/{item_type}.json')
config.scan()
@location_root
class EncodedRoot(Root):
properties = {
'title': 'Home',
'portal_title': 'ENCODE 3',
}
@view_config(context=Root, request_method='GET')
def home(context, request):
result = context.__json__(request)
result.update({
'@id': request.resource_path(context),
'@type': ['portal'],
# 'login': {'href': request.resource_path(context, 'login')},
})
return result
@view_config(route_name='schema', request_method='GET')
def schema(context, request):
item_type = request.matchdict['item_type']
collection = context.by_item_type[item_type]
return collection.schema
<commit_msg>Raise not Found on schema item_type error.<commit_after>from pyramid.httpexceptions import HTTPNotFound
from pyramid.view import view_config
from random import randint
from ..contentbase import (
Root,
location_root,
)
def includeme(config):
# Random processid so etags are invalidated after restart.
config.registry['encoded.processid'] = randint(0, 2 ** 32)
config.add_route('schema', '/profiles/{item_type}.json')
config.scan()
@location_root
class EncodedRoot(Root):
properties = {
'title': 'Home',
'portal_title': 'ENCODE 3',
}
@view_config(context=Root, request_method='GET')
def home(context, request):
result = context.__json__(request)
result.update({
'@id': request.resource_path(context),
'@type': ['portal'],
# 'login': {'href': request.resource_path(context, 'login')},
})
return result
@view_config(route_name='schema', request_method='GET')
def schema(context, request):
item_type = request.matchdict['item_type']
try:
collection = context.by_item_type[item_type]
except KeyError:
raise HTTPNotFound(item_type)
return collection.schema
|
96b06d80f6108997fab44ac1e6042fcae93cc82a
|
server.py
|
server.py
|
# coding: utf-8
import json
import tornado.ioloop
import tornado.web
import Adafruit_BMP.BMP085 as BMP085
class SensorAccess(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps(self.read_sensor()))
self.finish()
def read_sensor(self):
pass
class TempSensorAccess(SensorAccess):
def read_sensor(self):
sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)
return {
'temperature': sensor.read_temperature(),
'pressure': sensor.read_pressure(),
}
class IndexHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps({
'inde': 'pitools service'
}))
def start_server():
application = tornado.web.Application([
(r"/", IndexHandler),
(r"/sensors/env", TempSensorAccess),
])
application.listen(9876)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
start_server()
|
# coding: utf-8
import json
import tornado.ioloop
import tornado.web
import Adafruit_BMP.BMP085 as BMP085
class SensorAccess(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps(self.read_sensor()))
self.finish()
def read_sensor(self):
pass
class TempSensorAccess(SensorAccess):
def read_sensor(self):
sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)
return {
'temperature': sensor.read_temperature(),
'pressure': sensor.read_pressure(),
}
class IndexHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps({
'index': 'pitools service'
}))
self.finish()
def start_server():
application = tornado.web.Application([
(r"/", IndexHandler),
(r"/sensors/env", TempSensorAccess),
])
application.listen(9876)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
start_server()
|
Fix typo; Fix request never finish
|
Fix typo; Fix request never finish
|
Python
|
bsd-2-clause
|
JokerQyou/pitools
|
# coding: utf-8
import json
import tornado.ioloop
import tornado.web
import Adafruit_BMP.BMP085 as BMP085
class SensorAccess(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps(self.read_sensor()))
self.finish()
def read_sensor(self):
pass
class TempSensorAccess(SensorAccess):
def read_sensor(self):
sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)
return {
'temperature': sensor.read_temperature(),
'pressure': sensor.read_pressure(),
}
class IndexHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps({
'inde': 'pitools service'
}))
def start_server():
application = tornado.web.Application([
(r"/", IndexHandler),
(r"/sensors/env", TempSensorAccess),
])
application.listen(9876)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
start_server()
Fix typo; Fix request never finish
|
# coding: utf-8
import json
import tornado.ioloop
import tornado.web
import Adafruit_BMP.BMP085 as BMP085
class SensorAccess(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps(self.read_sensor()))
self.finish()
def read_sensor(self):
pass
class TempSensorAccess(SensorAccess):
def read_sensor(self):
sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)
return {
'temperature': sensor.read_temperature(),
'pressure': sensor.read_pressure(),
}
class IndexHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps({
'index': 'pitools service'
}))
self.finish()
def start_server():
application = tornado.web.Application([
(r"/", IndexHandler),
(r"/sensors/env", TempSensorAccess),
])
application.listen(9876)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
start_server()
|
<commit_before># coding: utf-8
import json
import tornado.ioloop
import tornado.web
import Adafruit_BMP.BMP085 as BMP085
class SensorAccess(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps(self.read_sensor()))
self.finish()
def read_sensor(self):
pass
class TempSensorAccess(SensorAccess):
def read_sensor(self):
sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)
return {
'temperature': sensor.read_temperature(),
'pressure': sensor.read_pressure(),
}
class IndexHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps({
'inde': 'pitools service'
}))
def start_server():
application = tornado.web.Application([
(r"/", IndexHandler),
(r"/sensors/env", TempSensorAccess),
])
application.listen(9876)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
start_server()
<commit_msg>Fix typo; Fix request never finish<commit_after>
|
# coding: utf-8
import json
import tornado.ioloop
import tornado.web
import Adafruit_BMP.BMP085 as BMP085
class SensorAccess(tornado.web.RequestHandler):
    """Base handler: serve a sensor reading as a JSON document."""

    @tornado.web.asynchronous
    def get(self):
        # read_sensor() is the subclass hook; this base implementation
        # returns None, which serializes to JSON "null".
        self.write(json.dumps(self.read_sensor()))
        # @asynchronous keeps the request open until finish() is called.
        self.finish()

    def read_sensor(self):
        # Override in subclasses to return a JSON-serializable value.
        pass
class TempSensorAccess(SensorAccess):
    """Serve temperature/pressure readings from a BMP085 sensor."""

    def read_sensor(self):
        # A fresh sensor object per request; ULTRAHIGHRES trades speed
        # for resolution.  Units are presumably degrees C and Pa
        # (Adafruit driver defaults) -- confirm against the driver docs.
        sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)
        return {
            'temperature': sensor.read_temperature(),
            'pressure': sensor.read_pressure(),
        }
class IndexHandler(tornado.web.RequestHandler):
    """GET / -- static service banner."""

    @tornado.web.asynchronous
    def get(self):
        self.write(json.dumps({
            'index': 'pitools service'
        }))
        # Required with @tornado.web.asynchronous: without this call
        # the connection would never be closed.
        self.finish()
def start_server():
    """Wire up the URL routes and run the IOLoop forever (blocking)."""
    application = tornado.web.Application([
        (r"/", IndexHandler),
        (r"/sensors/env", TempSensorAccess),
    ])
    # Port is hard-coded; listens on all interfaces by default.
    application.listen(9876)
    tornado.ioloop.IOLoop.instance().start()
# Run the HTTP service when executed directly (blocks forever).
if __name__ == "__main__":
    start_server()
|
# coding: utf-8
import json
import tornado.ioloop
import tornado.web
import Adafruit_BMP.BMP085 as BMP085
class SensorAccess(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps(self.read_sensor()))
self.finish()
def read_sensor(self):
pass
class TempSensorAccess(SensorAccess):
def read_sensor(self):
sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)
return {
'temperature': sensor.read_temperature(),
'pressure': sensor.read_pressure(),
}
class IndexHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps({
'inde': 'pitools service'
}))
def start_server():
application = tornado.web.Application([
(r"/", IndexHandler),
(r"/sensors/env", TempSensorAccess),
])
application.listen(9876)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
start_server()
Fix typo; Fix request never finish# coding: utf-8
import json
import tornado.ioloop
import tornado.web
import Adafruit_BMP.BMP085 as BMP085
class SensorAccess(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps(self.read_sensor()))
self.finish()
def read_sensor(self):
pass
class TempSensorAccess(SensorAccess):
def read_sensor(self):
sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)
return {
'temperature': sensor.read_temperature(),
'pressure': sensor.read_pressure(),
}
class IndexHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps({
'index': 'pitools service'
}))
self.finish()
def start_server():
application = tornado.web.Application([
(r"/", IndexHandler),
(r"/sensors/env", TempSensorAccess),
])
application.listen(9876)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
start_server()
|
<commit_before># coding: utf-8
import json
import tornado.ioloop
import tornado.web
import Adafruit_BMP.BMP085 as BMP085
class SensorAccess(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps(self.read_sensor()))
self.finish()
def read_sensor(self):
pass
class TempSensorAccess(SensorAccess):
def read_sensor(self):
sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)
return {
'temperature': sensor.read_temperature(),
'pressure': sensor.read_pressure(),
}
class IndexHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps({
'inde': 'pitools service'
}))
def start_server():
application = tornado.web.Application([
(r"/", IndexHandler),
(r"/sensors/env", TempSensorAccess),
])
application.listen(9876)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
start_server()
<commit_msg>Fix typo; Fix request never finish<commit_after># coding: utf-8
import json
import tornado.ioloop
import tornado.web
import Adafruit_BMP.BMP085 as BMP085
class SensorAccess(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps(self.read_sensor()))
self.finish()
def read_sensor(self):
pass
class TempSensorAccess(SensorAccess):
def read_sensor(self):
sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)
return {
'temperature': sensor.read_temperature(),
'pressure': sensor.read_pressure(),
}
class IndexHandler(tornado.web.RequestHandler):
@tornado.web.asynchronous
def get(self):
self.write(json.dumps({
'index': 'pitools service'
}))
self.finish()
def start_server():
application = tornado.web.Application([
(r"/", IndexHandler),
(r"/sensors/env", TempSensorAccess),
])
application.listen(9876)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
start_server()
|
2231c0384e56af56285999bc0bf7a096d3dd1cb9
|
pyuploadcare/dj/models.py
|
pyuploadcare/dj/models.py
|
from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, basestring):
return UploadCare().file(value)
if isinstance(value, File):
return value
raise ValidationError('Invalid value for a field')
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
|
from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.exceptions import InvalidRequestError
from pyuploadcare.file import File
class FileField(models.Field):
    """Django model field storing an UploadCare file id/URI.

    The database column is plain text (the serialized id/URI); Python
    values are ``pyuploadcare.file.File`` instances.
    """

    # SubfieldBase makes Django call to_python() on attribute
    # assignment (Python 2 metaclass idiom).
    __metaclass__ = models.SubfieldBase
    description = "UploadCare file id/URI with cached data"

    def get_internal_type(self):
        # Reuse TextField's database column type.
        return "TextField"

    def to_python(self, value):
        """Coerce a DB/user value to a ``File`` instance (or None)."""
        if not value:
            return None
        if isinstance(value, File):
            return value
        if not isinstance(value, basestring):
            raise ValidationError(
                u'Invalid value for a field: string was expected'
            )
        try:
            return UploadCare().file(value)
        except InvalidRequestError as exc:
            # Surface malformed ids/URIs as validation errors instead
            # of letting the API exception escape.
            raise ValidationError(
                u'Invalid value for a field: {exc}'.format(exc=exc)
            )

    def get_prep_value(self, value):
        # NOTE(review): raises AttributeError if value is None --
        # confirm callers never pass None here.
        return value.serialize()

    def get_db_prep_save(self, value, connection=None):
        if value:
            # Persist the file on UploadCare before saving the row.
            value.store()
        return value.serialize()

    def value_to_string(self, obj):
        # Serialization (e.g. dumpdata) is deliberately unsupported.
        assert False

    def formfield(self, **kwargs):
        """Use the UploadCare widget/form field by default."""
        defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
        defaults.update(kwargs)
        # yay for super!
        return super(FileField, self).formfield(**defaults)
|
Add handling of InvalidRequestError in ``to_python``
|
Add handling of InvalidRequestError in ``to_python`
|
Python
|
mit
|
uploadcare/pyuploadcare
|
from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, basestring):
return UploadCare().file(value)
if isinstance(value, File):
return value
raise ValidationError('Invalid value for a field')
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
Add handling of InvalidRequestError in ``to_python`
|
from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.exceptions import InvalidRequestError
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, File):
return value
if not isinstance(value, basestring):
raise ValidationError(
u'Invalid value for a field: string was expected'
)
try:
return UploadCare().file(value)
except InvalidRequestError as exc:
raise ValidationError(
u'Invalid value for a field: {exc}'.format(exc=exc)
)
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
|
<commit_before>from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, basestring):
return UploadCare().file(value)
if isinstance(value, File):
return value
raise ValidationError('Invalid value for a field')
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
<commit_msg>Add handling of InvalidRequestError in ``to_python`<commit_after>
|
from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.exceptions import InvalidRequestError
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, File):
return value
if not isinstance(value, basestring):
raise ValidationError(
u'Invalid value for a field: string was expected'
)
try:
return UploadCare().file(value)
except InvalidRequestError as exc:
raise ValidationError(
u'Invalid value for a field: {exc}'.format(exc=exc)
)
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
|
from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, basestring):
return UploadCare().file(value)
if isinstance(value, File):
return value
raise ValidationError('Invalid value for a field')
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
Add handling of InvalidRequestError in ``to_python`from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.exceptions import InvalidRequestError
from pyuploadcare.file import File
class FileField(models.Field):
__metaclass__ = models.SubfieldBase
description = "UploadCare file id/URI with cached data"
def get_internal_type(self):
return "TextField"
def to_python(self, value):
if not value:
return None
if isinstance(value, File):
return value
if not isinstance(value, basestring):
raise ValidationError(
u'Invalid value for a field: string was expected'
)
try:
return UploadCare().file(value)
except InvalidRequestError as exc:
raise ValidationError(
u'Invalid value for a field: {exc}'.format(exc=exc)
)
def get_prep_value(self, value):
return value.serialize()
def get_db_prep_save(self, value, connection=None):
if value:
value.store()
return value.serialize()
def value_to_string(self, obj):
assert False
def formfield(self, **kwargs):
defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
defaults.update(kwargs)
# yay for super!
return super(FileField, self).formfield(**defaults)
|
<commit_before>from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.file import File
class FileField(models.Field):
    """Django model field storing an UploadCare file id/URI.

    Stored in the database as serialized text; exposed in Python as a
    ``pyuploadcare.file.File`` instance.
    """
    # SubfieldBase makes Django run to_python() on attribute assignment
    # (Python 2 / old-Django idiom).
    __metaclass__ = models.SubfieldBase
    description = "UploadCare file id/URI with cached data"

    def get_internal_type(self):
        # Persisted as plain text at the database level.
        return "TextField"

    def to_python(self, value):
        """Coerce a stored/assigned value to a File (or None when empty)."""
        if not value:
            return None
        if isinstance(value, basestring):
            # String input is treated as a file id/URI and resolved.
            return UploadCare().file(value)
        if isinstance(value, File):
            return value
        raise ValidationError('Invalid value for a field')

    def get_prep_value(self, value):
        # NOTE(review): assumes value is never None here — Django can pass
        # None for empty fields, which would raise AttributeError; confirm.
        return value.serialize()

    def get_db_prep_save(self, value, connection=None):
        if value:
            # Ensure the file is permanently stored before persisting its
            # reference; falsy values implicitly save as None.
            value.store()
            return value.serialize()

    def value_to_string(self, obj):
        # Serialization (e.g. dumpdata) is intentionally unsupported.
        assert False

    def formfield(self, **kwargs):
        # Route form rendering through the pyuploadcare widget/field pair,
        # while still letting callers override via kwargs.
        defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
        defaults.update(kwargs)
        # yay for super!
        return super(FileField, self).formfield(**defaults)
<commit_msg>Add handling of InvalidRequestError in ``to_python`<commit_after>from django.db import models
from django.core.exceptions import ValidationError
from pyuploadcare.dj import forms, UploadCare
from pyuploadcare.exceptions import InvalidRequestError
from pyuploadcare.file import File
class FileField(models.Field):
    """Django model field storing an UploadCare file id/URI.

    Stored in the database as serialized text; exposed in Python as a
    ``pyuploadcare.file.File`` instance.
    """
    # SubfieldBase makes Django run to_python() on attribute assignment
    # (Python 2 / old-Django idiom).
    __metaclass__ = models.SubfieldBase
    description = "UploadCare file id/URI with cached data"

    def get_internal_type(self):
        # Persisted as plain text at the database level.
        return "TextField"

    def to_python(self, value):
        """Coerce a stored/assigned value to a File, raising ValidationError
        for anything that is neither a File, an id/URI string, nor empty."""
        if not value:
            return None
        if isinstance(value, File):
            return value
        if not isinstance(value, basestring):
            raise ValidationError(
                u'Invalid value for a field: string was expected'
            )
        try:
            return UploadCare().file(value)
        except InvalidRequestError as exc:
            raise ValidationError(
                u'Invalid value for a field: {exc}'.format(exc=exc)
            )

    def get_prep_value(self, value):
        # Django passes None through for empty/null fields; the previous
        # unconditional .serialize() raised AttributeError on it.
        if value is None:
            return None
        return value.serialize()

    def get_db_prep_save(self, value, connection=None):
        if value:
            # Ensure the file is permanently stored before persisting its
            # reference; falsy values implicitly save as None.
            value.store()
            return value.serialize()

    def value_to_string(self, obj):
        # Serialization (e.g. dumpdata) is intentionally unsupported.
        # A raise survives `python -O`, unlike the former `assert False`.
        raise NotImplementedError('FileField serialization is not supported')

    def formfield(self, **kwargs):
        # Route form rendering through the pyuploadcare widget/field pair,
        # while still letting callers override via kwargs.
        defaults = {'widget': forms.FileWidget, 'form_class': forms.FileField}
        defaults.update(kwargs)
        # yay for super!
        return super(FileField, self).formfield(**defaults)
|
e7582e164b034f3f4fa63dc2fca5ae7b6ae78f2e
|
Discord/utilities/converters.py
|
Discord/utilities/converters.py
|
from discord.ext import commands
class Maptype(commands.Converter):
'''
For Google Maps Static API parameter
https://developers.google.com/maps/documentation/maps-static/dev-guide
'''
async def convert(self, ctx, argument):
if argument not in ("roadmap", "satellite", "hybrid", "terrain"):
raise commands.BadArgument("Invalid map type")
return argument
# TODO: Use for steam gamecount command?
class SteamAccount(commands.Converter):
async def convert(self, ctx, argument):
try:
return int(argument) - 76561197960265728
except ValueError:
url = "http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/"
params = {"key": ctx.bot.STEAM_WEB_API_KEY, "vanityurl": argument}
async with ctx.bot.aiohttp_session.get(url, params = params) as resp:
# TODO: Handle 429?
data = await resp.json()
if data["response"]["success"] == 42: # NoMatch, https://partner.steamgames.com/doc/api/steam_api#EResult
raise commands.BadArgument("Account not found")
return int(data['response']['steamid']) - 76561197960265728
|
from discord.ext import commands
class Maptype(commands.Converter):
    """Validate a Google Maps Static API ``maptype`` parameter.

    https://developers.google.com/maps/documentation/maps-static/dev-guide
    """

    # The four map types accepted by the Static API.
    _VALID_TYPES = frozenset(("roadmap", "satellite", "hybrid", "terrain"))

    async def convert(self, ctx, argument):
        if argument in self._VALID_TYPES:
            return argument
        raise commands.BadArgument("Invalid map type")
# https://developer.valvesoftware.com/wiki/SteamID
# SteamID64 base for a Public Individual user account (universe 1,
# account type 1, instance 1). A plain hex literal replaces the former
# int("0x...", 16) round-trip; same value: 76561197960265728.
STEAM_ID_64_BASE = 0x110000100000000

# TODO: Use for steam gamecount command?
class SteamAccount(commands.Converter):
    """Convert a SteamID64 or a vanity URL name to a 32-bit account id."""

    async def convert(self, ctx, argument):
        try:
            # Numeric input: treat as a SteamID64 and strip the base.
            return int(argument) - STEAM_ID_64_BASE
        except ValueError:
            # Non-numeric input: resolve it as a vanity URL name.
            # NOTE(review): endpoint is plain http, so the API key travels
            # in the clear — consider switching to https.
            url = "http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/"
            params = {"key": ctx.bot.STEAM_WEB_API_KEY, "vanityurl": argument}
            async with ctx.bot.aiohttp_session.get(url, params = params) as resp:
                # TODO: Handle 429?
                data = await resp.json()
                if data["response"]["success"] == 42:  # NoMatch, https://partner.steamgames.com/doc/api/steam_api#EResult
                    raise commands.BadArgument("Account not found")
                return int(data['response']['steamid']) - STEAM_ID_64_BASE
|
Use constant for SteamID64 base for Steam Account converter
|
[Discord] Use constant for SteamID64 base for Steam Account converter
|
Python
|
mit
|
Harmon758/Harmonbot,Harmon758/Harmonbot
|
from discord.ext import commands
class Maptype(commands.Converter):
'''
For Google Maps Static API parameter
https://developers.google.com/maps/documentation/maps-static/dev-guide
'''
async def convert(self, ctx, argument):
if argument not in ("roadmap", "satellite", "hybrid", "terrain"):
raise commands.BadArgument("Invalid map type")
return argument
# TODO: Use for steam gamecount command?
class SteamAccount(commands.Converter):
async def convert(self, ctx, argument):
try:
return int(argument) - 76561197960265728
except ValueError:
url = "http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/"
params = {"key": ctx.bot.STEAM_WEB_API_KEY, "vanityurl": argument}
async with ctx.bot.aiohttp_session.get(url, params = params) as resp:
# TODO: Handle 429?
data = await resp.json()
if data["response"]["success"] == 42: # NoMatch, https://partner.steamgames.com/doc/api/steam_api#EResult
raise commands.BadArgument("Account not found")
return int(data['response']['steamid']) - 76561197960265728
[Discord] Use constant for SteamID64 base for Steam Account converter
|
from discord.ext import commands
class Maptype(commands.Converter):
'''
For Google Maps Static API parameter
https://developers.google.com/maps/documentation/maps-static/dev-guide
'''
async def convert(self, ctx, argument):
if argument not in ("roadmap", "satellite", "hybrid", "terrain"):
raise commands.BadArgument("Invalid map type")
return argument
# https://developer.valvesoftware.com/wiki/SteamID
STEAM_ID_64_BASE = int("0x110000100000000", 16) # Assuming Public Individual user account
# TODO: Use for steam gamecount command?
class SteamAccount(commands.Converter):
async def convert(self, ctx, argument):
try:
return int(argument) - STEAM_ID_64_BASE
except ValueError:
url = "http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/"
params = {"key": ctx.bot.STEAM_WEB_API_KEY, "vanityurl": argument}
async with ctx.bot.aiohttp_session.get(url, params = params) as resp:
# TODO: Handle 429?
data = await resp.json()
if data["response"]["success"] == 42: # NoMatch, https://partner.steamgames.com/doc/api/steam_api#EResult
raise commands.BadArgument("Account not found")
return int(data['response']['steamid']) - STEAM_ID_64_BASE
|
<commit_before>
from discord.ext import commands
class Maptype(commands.Converter):
'''
For Google Maps Static API parameter
https://developers.google.com/maps/documentation/maps-static/dev-guide
'''
async def convert(self, ctx, argument):
if argument not in ("roadmap", "satellite", "hybrid", "terrain"):
raise commands.BadArgument("Invalid map type")
return argument
# TODO: Use for steam gamecount command?
class SteamAccount(commands.Converter):
async def convert(self, ctx, argument):
try:
return int(argument) - 76561197960265728
except ValueError:
url = "http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/"
params = {"key": ctx.bot.STEAM_WEB_API_KEY, "vanityurl": argument}
async with ctx.bot.aiohttp_session.get(url, params = params) as resp:
# TODO: Handle 429?
data = await resp.json()
if data["response"]["success"] == 42: # NoMatch, https://partner.steamgames.com/doc/api/steam_api#EResult
raise commands.BadArgument("Account not found")
return int(data['response']['steamid']) - 76561197960265728
<commit_msg>[Discord] Use constant for SteamID64 base for Steam Account converter<commit_after>
|
from discord.ext import commands
class Maptype(commands.Converter):
'''
For Google Maps Static API parameter
https://developers.google.com/maps/documentation/maps-static/dev-guide
'''
async def convert(self, ctx, argument):
if argument not in ("roadmap", "satellite", "hybrid", "terrain"):
raise commands.BadArgument("Invalid map type")
return argument
# https://developer.valvesoftware.com/wiki/SteamID
STEAM_ID_64_BASE = int("0x110000100000000", 16) # Assuming Public Individual user account
# TODO: Use for steam gamecount command?
class SteamAccount(commands.Converter):
async def convert(self, ctx, argument):
try:
return int(argument) - STEAM_ID_64_BASE
except ValueError:
url = "http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/"
params = {"key": ctx.bot.STEAM_WEB_API_KEY, "vanityurl": argument}
async with ctx.bot.aiohttp_session.get(url, params = params) as resp:
# TODO: Handle 429?
data = await resp.json()
if data["response"]["success"] == 42: # NoMatch, https://partner.steamgames.com/doc/api/steam_api#EResult
raise commands.BadArgument("Account not found")
return int(data['response']['steamid']) - STEAM_ID_64_BASE
|
from discord.ext import commands
class Maptype(commands.Converter):
'''
For Google Maps Static API parameter
https://developers.google.com/maps/documentation/maps-static/dev-guide
'''
async def convert(self, ctx, argument):
if argument not in ("roadmap", "satellite", "hybrid", "terrain"):
raise commands.BadArgument("Invalid map type")
return argument
# TODO: Use for steam gamecount command?
class SteamAccount(commands.Converter):
async def convert(self, ctx, argument):
try:
return int(argument) - 76561197960265728
except ValueError:
url = "http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/"
params = {"key": ctx.bot.STEAM_WEB_API_KEY, "vanityurl": argument}
async with ctx.bot.aiohttp_session.get(url, params = params) as resp:
# TODO: Handle 429?
data = await resp.json()
if data["response"]["success"] == 42: # NoMatch, https://partner.steamgames.com/doc/api/steam_api#EResult
raise commands.BadArgument("Account not found")
return int(data['response']['steamid']) - 76561197960265728
[Discord] Use constant for SteamID64 base for Steam Account converter
from discord.ext import commands
class Maptype(commands.Converter):
'''
For Google Maps Static API parameter
https://developers.google.com/maps/documentation/maps-static/dev-guide
'''
async def convert(self, ctx, argument):
if argument not in ("roadmap", "satellite", "hybrid", "terrain"):
raise commands.BadArgument("Invalid map type")
return argument
# https://developer.valvesoftware.com/wiki/SteamID
STEAM_ID_64_BASE = int("0x110000100000000", 16) # Assuming Public Individual user account
# TODO: Use for steam gamecount command?
class SteamAccount(commands.Converter):
async def convert(self, ctx, argument):
try:
return int(argument) - STEAM_ID_64_BASE
except ValueError:
url = "http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/"
params = {"key": ctx.bot.STEAM_WEB_API_KEY, "vanityurl": argument}
async with ctx.bot.aiohttp_session.get(url, params = params) as resp:
# TODO: Handle 429?
data = await resp.json()
if data["response"]["success"] == 42: # NoMatch, https://partner.steamgames.com/doc/api/steam_api#EResult
raise commands.BadArgument("Account not found")
return int(data['response']['steamid']) - STEAM_ID_64_BASE
|
<commit_before>
from discord.ext import commands
class Maptype(commands.Converter):
'''
For Google Maps Static API parameter
https://developers.google.com/maps/documentation/maps-static/dev-guide
'''
async def convert(self, ctx, argument):
if argument not in ("roadmap", "satellite", "hybrid", "terrain"):
raise commands.BadArgument("Invalid map type")
return argument
# TODO: Use for steam gamecount command?
class SteamAccount(commands.Converter):
async def convert(self, ctx, argument):
try:
return int(argument) - 76561197960265728
except ValueError:
url = "http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/"
params = {"key": ctx.bot.STEAM_WEB_API_KEY, "vanityurl": argument}
async with ctx.bot.aiohttp_session.get(url, params = params) as resp:
# TODO: Handle 429?
data = await resp.json()
if data["response"]["success"] == 42: # NoMatch, https://partner.steamgames.com/doc/api/steam_api#EResult
raise commands.BadArgument("Account not found")
return int(data['response']['steamid']) - 76561197960265728
<commit_msg>[Discord] Use constant for SteamID64 base for Steam Account converter<commit_after>
from discord.ext import commands
class Maptype(commands.Converter):
'''
For Google Maps Static API parameter
https://developers.google.com/maps/documentation/maps-static/dev-guide
'''
async def convert(self, ctx, argument):
if argument not in ("roadmap", "satellite", "hybrid", "terrain"):
raise commands.BadArgument("Invalid map type")
return argument
# https://developer.valvesoftware.com/wiki/SteamID
STEAM_ID_64_BASE = int("0x110000100000000", 16) # Assuming Public Individual user account
# TODO: Use for steam gamecount command?
class SteamAccount(commands.Converter):
async def convert(self, ctx, argument):
try:
return int(argument) - STEAM_ID_64_BASE
except ValueError:
url = "http://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/"
params = {"key": ctx.bot.STEAM_WEB_API_KEY, "vanityurl": argument}
async with ctx.bot.aiohttp_session.get(url, params = params) as resp:
# TODO: Handle 429?
data = await resp.json()
if data["response"]["success"] == 42: # NoMatch, https://partner.steamgames.com/doc/api/steam_api#EResult
raise commands.BadArgument("Account not found")
return int(data['response']['steamid']) - STEAM_ID_64_BASE
|
862fa3a737ef944899958551c66bcf6ad8fb4c86
|
packages/nuget.py
|
packages/nuget.py
|
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
|
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make} PREFIX=%{prefix}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
|
Set PREFIX in make as well as makeinstall
|
Set PREFIX in make as well as makeinstall
|
Python
|
mit
|
BansheeMediaPlayer/bockbuild,mono/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild
|
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
Set PREFIX in make as well as makeinstall
|
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make} PREFIX=%{prefix}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
|
<commit_before>
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
<commit_msg>Set PREFIX in make as well as makeinstall<commit_after>
|
class NuGetPackage(GitHubTarballPackage):
    """Bockbuild package definition for NuGet 2.8.1 (mono/nuget on GitHub)."""

    def __init__(self):
        # Pin the tarball to a specific commit of the 2.8.1 release;
        # the project has no configure step.
        GitHubTarballPackage.__init__(self,
            'mono', 'nuget',
            '2.8.1',
            '7b20cd5408852e725bd14a13855bd238506c6a19',
            configure = '')

    def build(self):
        # PREFIX must be passed at build time as well as install time.
        self.sh ('%{make} PREFIX=%{prefix}')

    def install(self):
        self.sh ('%{makeinstall} PREFIX=%{prefix}')

NuGetPackage()
|
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
Set PREFIX in make as well as makeinstall
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make} PREFIX=%{prefix}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
|
<commit_before>
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
<commit_msg>Set PREFIX in make as well as makeinstall<commit_after>
class NuGetPackage(GitHubTarballPackage):
def __init__(self):
GitHubTarballPackage.__init__(self,
'mono', 'nuget',
'2.8.1',
'7b20cd5408852e725bd14a13855bd238506c6a19',
configure = '')
def build(self):
self.sh ('%{make} PREFIX=%{prefix}')
def install(self):
self.sh ('%{makeinstall} PREFIX=%{prefix}')
NuGetPackage()
|
0e2bfd59ca9db6568bac40504977d80b8ad84aba
|
helga_prod_fixer.py
|
helga_prod_fixer.py
|
import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
|
import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Did you try rebooting {thing}? Try that first.',
'{thing} is only IE6 compatible. Make sure you\'re using the right browser.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
|
Reboot and IE6 compatibility fixer messages
|
Reboot and IE6 compatibility fixer messages
|
Python
|
mit
|
shaunduncan/helga-prod-fixer
|
import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
Reboot and IE6 compatibility fixer messages
|
import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Did you try rebooting {thing}? Try that first.',
'{thing} is only IE6 compatible. Make sure you\'re using the right browser.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
|
<commit_before>import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
<commit_msg>Reboot and IE6 compatibility fixer messages<commit_after>
|
import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Did you try rebooting {thing}? Try that first.',
'{thing} is only IE6 compatible. Make sure you\'re using the right browser.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
    """Reply with a random canned 'fix' response for the requested thing."""
    thing = ' '.join(args)
    template = random.choice(RESPONSES)
    return template.format(nick=nick, thing=thing)
|
import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
Reboot and IE6 compatibility fixer messagesimport random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Did you try rebooting {thing}? Try that first.',
'{thing} is only IE6 compatible. Make sure you\'re using the right browser.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
|
<commit_before>import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
<commit_msg>Reboot and IE6 compatibility fixer messages<commit_after>import random
from helga.plugins import command
RESPONSES = [
'There is no hope for {thing}, {nick}',
'It looks ok to me...',
'Did you power cycle {thing}? Are any of the lights blinking?',
'I\'ll take {thing} to the Genius Bar after work',
'Can we look at this tomorrow? I have Com Truise tickets...',
'Just tell them not to use {thing} for now.',
'Did you try rebooting {thing}? Try that first.',
'{thing} is only IE6 compatible. Make sure you\'re using the right browser.',
'Turning {thing} off and back on again',
'I really wish I could, but it looks past the point of no return',
]
@command('fix', help='Usage: helga fix <thing>')
def fix(client, channel, nick, message, cmd, args):
return random.choice(RESPONSES).format(nick=nick, thing=' '.join(args))
|
981d0473a24d52fb19e8da1a2af18c9f8823dd29
|
heufybot/factory.py
|
heufybot/factory.py
|
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from heufybot.connection import HeufyBotConnection
class HeufyBotFactory(ReconnectingClientFactory):
protocol = HeufyBotConnection
def __init__(self, bot):
self.bot = bot
self.currentlyDisconnecting = []
def buildProtocol(self, addr):
self.resetDelay()
return self.protocol(self.bot)
def clientConnectionFailed(self, connector, reason):
self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).",
connector=connector, reason=reason)
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
# Disable modules
if connector.host in self.bot.moduleHandler.enabledModules:
for module in self.bot.moduleHandler.enabledModules[connector.host]:
self.bot.moduleHandler.disableModule(module, connector.host, True)
del self.bot.servers[connector.host]
# Check whether or not we should reconnect
if connector.host in self.currentlyDisconnecting:
self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector)
ClientFactory.clientConnectionLost(self, connector, reason)
self.currentlyDisconnecting.remove(connector.host)
self.bot.countConnections()
else:
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
|
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from heufybot.connection import HeufyBotConnection
class HeufyBotFactory(ReconnectingClientFactory):
protocol = HeufyBotConnection
def __init__(self, bot):
self.bot = bot
self.currentlyDisconnecting = []
def buildProtocol(self, addr):
self.resetDelay()
return self.protocol(self.bot)
def clientConnectionFailed(self, connector, reason):
self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).",
connector=connector, reason=reason)
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
# Disable modules
if connector.host in self.bot.moduleHandler.enabledModules:
for module in self.bot.moduleHandler.enabledModules[connector.host]:
self.bot.moduleHandler.disableModule(module, connector.host, True)
self.bot.moduleHandler.runGenericAction("disconnect", connector.host)
del self.bot.servers[connector.host]
# Check whether or not we should reconnect
if connector.host in self.currentlyDisconnecting:
self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector)
ClientFactory.clientConnectionLost(self, connector, reason)
self.currentlyDisconnecting.remove(connector.host)
self.bot.countConnections()
else:
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
|
Add an action for server disconnects
|
Add an action for server disconnects
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from heufybot.connection import HeufyBotConnection
class HeufyBotFactory(ReconnectingClientFactory):
protocol = HeufyBotConnection
def __init__(self, bot):
self.bot = bot
self.currentlyDisconnecting = []
def buildProtocol(self, addr):
self.resetDelay()
return self.protocol(self.bot)
def clientConnectionFailed(self, connector, reason):
self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).",
connector=connector, reason=reason)
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
# Disable modules
if connector.host in self.bot.moduleHandler.enabledModules:
for module in self.bot.moduleHandler.enabledModules[connector.host]:
self.bot.moduleHandler.disableModule(module, connector.host, True)
del self.bot.servers[connector.host]
# Check whether or not we should reconnect
if connector.host in self.currentlyDisconnecting:
self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector)
ClientFactory.clientConnectionLost(self, connector, reason)
self.currentlyDisconnecting.remove(connector.host)
self.bot.countConnections()
else:
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
Add an action for server disconnects
|
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from heufybot.connection import HeufyBotConnection
class HeufyBotFactory(ReconnectingClientFactory):
protocol = HeufyBotConnection
def __init__(self, bot):
self.bot = bot
self.currentlyDisconnecting = []
def buildProtocol(self, addr):
self.resetDelay()
return self.protocol(self.bot)
def clientConnectionFailed(self, connector, reason):
self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).",
connector=connector, reason=reason)
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
# Disable modules
if connector.host in self.bot.moduleHandler.enabledModules:
for module in self.bot.moduleHandler.enabledModules[connector.host]:
self.bot.moduleHandler.disableModule(module, connector.host, True)
self.bot.moduleHandler.runGenericAction("disconnect", connector.host)
del self.bot.servers[connector.host]
# Check whether or not we should reconnect
if connector.host in self.currentlyDisconnecting:
self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector)
ClientFactory.clientConnectionLost(self, connector, reason)
self.currentlyDisconnecting.remove(connector.host)
self.bot.countConnections()
else:
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
|
<commit_before>from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from heufybot.connection import HeufyBotConnection
class HeufyBotFactory(ReconnectingClientFactory):
protocol = HeufyBotConnection
def __init__(self, bot):
self.bot = bot
self.currentlyDisconnecting = []
def buildProtocol(self, addr):
self.resetDelay()
return self.protocol(self.bot)
def clientConnectionFailed(self, connector, reason):
self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).",
connector=connector, reason=reason)
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
# Disable modules
if connector.host in self.bot.moduleHandler.enabledModules:
for module in self.bot.moduleHandler.enabledModules[connector.host]:
self.bot.moduleHandler.disableModule(module, connector.host, True)
del self.bot.servers[connector.host]
# Check whether or not we should reconnect
if connector.host in self.currentlyDisconnecting:
self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector)
ClientFactory.clientConnectionLost(self, connector, reason)
self.currentlyDisconnecting.remove(connector.host)
self.bot.countConnections()
else:
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
<commit_msg>Add an action for server disconnects<commit_after>
|
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from heufybot.connection import HeufyBotConnection
class HeufyBotFactory(ReconnectingClientFactory):
protocol = HeufyBotConnection
def __init__(self, bot):
self.bot = bot
self.currentlyDisconnecting = []
def buildProtocol(self, addr):
self.resetDelay()
return self.protocol(self.bot)
def clientConnectionFailed(self, connector, reason):
self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).",
connector=connector, reason=reason)
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
# Disable modules
if connector.host in self.bot.moduleHandler.enabledModules:
for module in self.bot.moduleHandler.enabledModules[connector.host]:
self.bot.moduleHandler.disableModule(module, connector.host, True)
self.bot.moduleHandler.runGenericAction("disconnect", connector.host)
del self.bot.servers[connector.host]
# Check whether or not we should reconnect
if connector.host in self.currentlyDisconnecting:
self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector)
ClientFactory.clientConnectionLost(self, connector, reason)
self.currentlyDisconnecting.remove(connector.host)
self.bot.countConnections()
else:
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
|
from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from heufybot.connection import HeufyBotConnection
class HeufyBotFactory(ReconnectingClientFactory):
protocol = HeufyBotConnection
def __init__(self, bot):
self.bot = bot
self.currentlyDisconnecting = []
def buildProtocol(self, addr):
self.resetDelay()
return self.protocol(self.bot)
def clientConnectionFailed(self, connector, reason):
self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).",
connector=connector, reason=reason)
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
# Disable modules
if connector.host in self.bot.moduleHandler.enabledModules:
for module in self.bot.moduleHandler.enabledModules[connector.host]:
self.bot.moduleHandler.disableModule(module, connector.host, True)
del self.bot.servers[connector.host]
# Check whether or not we should reconnect
if connector.host in self.currentlyDisconnecting:
self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector)
ClientFactory.clientConnectionLost(self, connector, reason)
self.currentlyDisconnecting.remove(connector.host)
self.bot.countConnections()
else:
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
Add an action for server disconnectsfrom twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from heufybot.connection import HeufyBotConnection
class HeufyBotFactory(ReconnectingClientFactory):
protocol = HeufyBotConnection
def __init__(self, bot):
self.bot = bot
self.currentlyDisconnecting = []
def buildProtocol(self, addr):
self.resetDelay()
return self.protocol(self.bot)
def clientConnectionFailed(self, connector, reason):
self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).",
connector=connector, reason=reason)
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
# Disable modules
if connector.host in self.bot.moduleHandler.enabledModules:
for module in self.bot.moduleHandler.enabledModules[connector.host]:
self.bot.moduleHandler.disableModule(module, connector.host, True)
self.bot.moduleHandler.runGenericAction("disconnect", connector.host)
del self.bot.servers[connector.host]
# Check whether or not we should reconnect
if connector.host in self.currentlyDisconnecting:
self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector)
ClientFactory.clientConnectionLost(self, connector, reason)
self.currentlyDisconnecting.remove(connector.host)
self.bot.countConnections()
else:
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
|
<commit_before>from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from heufybot.connection import HeufyBotConnection
class HeufyBotFactory(ReconnectingClientFactory):
protocol = HeufyBotConnection
def __init__(self, bot):
self.bot = bot
self.currentlyDisconnecting = []
def buildProtocol(self, addr):
self.resetDelay()
return self.protocol(self.bot)
def clientConnectionFailed(self, connector, reason):
self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).",
connector=connector, reason=reason)
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
# Disable modules
if connector.host in self.bot.moduleHandler.enabledModules:
for module in self.bot.moduleHandler.enabledModules[connector.host]:
self.bot.moduleHandler.disableModule(module, connector.host, True)
del self.bot.servers[connector.host]
# Check whether or not we should reconnect
if connector.host in self.currentlyDisconnecting:
self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector)
ClientFactory.clientConnectionLost(self, connector, reason)
self.currentlyDisconnecting.remove(connector.host)
self.bot.countConnections()
else:
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
<commit_msg>Add an action for server disconnects<commit_after>from twisted.internet.protocol import ClientFactory, ReconnectingClientFactory
from heufybot.connection import HeufyBotConnection
class HeufyBotFactory(ReconnectingClientFactory):
protocol = HeufyBotConnection
def __init__(self, bot):
self.bot = bot
self.currentlyDisconnecting = []
def buildProtocol(self, addr):
self.resetDelay()
return self.protocol(self.bot)
def clientConnectionFailed(self, connector, reason):
self.bot.log.info("Client connection to {connector.host} failed (Reason: {reason.value}).",
connector=connector, reason=reason)
ReconnectingClientFactory.clientConnectionFailed(self, connector, reason)
def clientConnectionLost(self, connector, reason):
# Disable modules
if connector.host in self.bot.moduleHandler.enabledModules:
for module in self.bot.moduleHandler.enabledModules[connector.host]:
self.bot.moduleHandler.disableModule(module, connector.host, True)
self.bot.moduleHandler.runGenericAction("disconnect", connector.host)
del self.bot.servers[connector.host]
# Check whether or not we should reconnect
if connector.host in self.currentlyDisconnecting:
self.bot.log.info("Connection to {connector.host} was closed cleanly.", connector=connector)
ClientFactory.clientConnectionLost(self, connector, reason)
self.currentlyDisconnecting.remove(connector.host)
self.bot.countConnections()
else:
ReconnectingClientFactory.clientConnectionLost(self, connector, reason)
|
6642a04ffb999188391b4f35e248119cb824aed3
|
test/expression_command/radar_9531204/TestPrintfAfterUp.py
|
test/expression_command/radar_9531204/TestPrintfAfterUp.py
|
"""
The evaluating printf(...) after break stop and then up a stack frame.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class Radar9531204TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
# rdar://problem/9531204
@expectedFailureFreeBSD('llvm.org/pr17183')
def test_expr_commands(self):
"""The evaluating printf(...) after break stop and then up a stack frame."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol (self, 'foo', sym_exact=True, num_expected_locations=1)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("frame variable")
# This works fine.
self.runCmd('expression (int)printf("value is: %d.\\n", value);')
# rdar://problem/9531204
# "Error dematerializing struct" error when evaluating expressions "up" on the stack
self.runCmd('up') # frame select -r 1
self.runCmd("frame variable")
# This does not currently.
self.runCmd('expression (int)printf("argc is: %d.\\n", argc)')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
"""
The evaluating printf(...) after break stop and then up a stack frame.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class Radar9531204TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
# rdar://problem/9531204
def test_expr_commands(self):
"""The evaluating printf(...) after break stop and then up a stack frame."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol (self, 'foo', sym_exact=True, num_expected_locations=1)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("frame variable")
# This works fine.
self.runCmd('expression (int)printf("value is: %d.\\n", value);')
# rdar://problem/9531204
# "Error dematerializing struct" error when evaluating expressions "up" on the stack
self.runCmd('up') # frame select -r 1
self.runCmd("frame variable")
# This does not currently.
self.runCmd('expression (int)printf("argc is: %d.\\n", argc)')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
Remove decorator for now-passing test
|
Remove decorator for now-passing test
The underlying issue was actually a Clang bug, now fixed. The test now
reports XPASS for me locally and on the buildbot.
llvm.org/pr17183 (LLDB)
llvm.org/pr18950 (Clang)
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@206761 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb
|
"""
The evaluating printf(...) after break stop and then up a stack frame.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class Radar9531204TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
# rdar://problem/9531204
@expectedFailureFreeBSD('llvm.org/pr17183')
def test_expr_commands(self):
"""The evaluating printf(...) after break stop and then up a stack frame."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol (self, 'foo', sym_exact=True, num_expected_locations=1)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("frame variable")
# This works fine.
self.runCmd('expression (int)printf("value is: %d.\\n", value);')
# rdar://problem/9531204
# "Error dematerializing struct" error when evaluating expressions "up" on the stack
self.runCmd('up') # frame select -r 1
self.runCmd("frame variable")
# This does not currently.
self.runCmd('expression (int)printf("argc is: %d.\\n", argc)')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
Remove decorator for now-passing test
The underlying issue was actually a Clang bug, now fixed. The test now
reports XPASS for me locally and on the buildbot.
llvm.org/pr17183 (LLDB)
llvm.org/pr18950 (Clang)
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@206761 91177308-0d34-0410-b5e6-96231b3b80d8
|
"""
The evaluating printf(...) after break stop and then up a stack frame.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class Radar9531204TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
# rdar://problem/9531204
def test_expr_commands(self):
"""The evaluating printf(...) after break stop and then up a stack frame."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol (self, 'foo', sym_exact=True, num_expected_locations=1)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("frame variable")
# This works fine.
self.runCmd('expression (int)printf("value is: %d.\\n", value);')
# rdar://problem/9531204
# "Error dematerializing struct" error when evaluating expressions "up" on the stack
self.runCmd('up') # frame select -r 1
self.runCmd("frame variable")
# This does not currently.
self.runCmd('expression (int)printf("argc is: %d.\\n", argc)')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
<commit_before>"""
The evaluating printf(...) after break stop and then up a stack frame.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class Radar9531204TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
# rdar://problem/9531204
@expectedFailureFreeBSD('llvm.org/pr17183')
def test_expr_commands(self):
"""The evaluating printf(...) after break stop and then up a stack frame."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol (self, 'foo', sym_exact=True, num_expected_locations=1)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("frame variable")
# This works fine.
self.runCmd('expression (int)printf("value is: %d.\\n", value);')
# rdar://problem/9531204
# "Error dematerializing struct" error when evaluating expressions "up" on the stack
self.runCmd('up') # frame select -r 1
self.runCmd("frame variable")
# This does not currently.
self.runCmd('expression (int)printf("argc is: %d.\\n", argc)')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
<commit_msg>Remove decorator for now-passing test
The underlying issue was actually a Clang bug, now fixed. The test now
reports XPASS for me locally and on the buildbot.
llvm.org/pr17183 (LLDB)
llvm.org/pr18950 (Clang)
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@206761 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
"""
The evaluating printf(...) after break stop and then up a stack frame.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class Radar9531204TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
# rdar://problem/9531204
def test_expr_commands(self):
"""The evaluating printf(...) after break stop and then up a stack frame."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol (self, 'foo', sym_exact=True, num_expected_locations=1)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("frame variable")
# This works fine.
self.runCmd('expression (int)printf("value is: %d.\\n", value);')
# rdar://problem/9531204
# "Error dematerializing struct" error when evaluating expressions "up" on the stack
self.runCmd('up') # frame select -r 1
self.runCmd("frame variable")
# This does not currently.
self.runCmd('expression (int)printf("argc is: %d.\\n", argc)')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
"""
The evaluating printf(...) after break stop and then up a stack frame.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class Radar9531204TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
# rdar://problem/9531204
@expectedFailureFreeBSD('llvm.org/pr17183')
def test_expr_commands(self):
"""The evaluating printf(...) after break stop and then up a stack frame."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol (self, 'foo', sym_exact=True, num_expected_locations=1)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("frame variable")
# This works fine.
self.runCmd('expression (int)printf("value is: %d.\\n", value);')
# rdar://problem/9531204
# "Error dematerializing struct" error when evaluating expressions "up" on the stack
self.runCmd('up') # frame select -r 1
self.runCmd("frame variable")
# This does not currently.
self.runCmd('expression (int)printf("argc is: %d.\\n", argc)')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
Remove decorator for now-passing test
The underlying issue was actually a Clang bug, now fixed. The test now
reports XPASS for me locally and on the buildbot.
llvm.org/pr17183 (LLDB)
llvm.org/pr18950 (Clang)
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@206761 91177308-0d34-0410-b5e6-96231b3b80d8"""
The evaluating printf(...) after break stop and then up a stack frame.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class Radar9531204TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
# rdar://problem/9531204
def test_expr_commands(self):
"""The evaluating printf(...) after break stop and then up a stack frame."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol (self, 'foo', sym_exact=True, num_expected_locations=1)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("frame variable")
# This works fine.
self.runCmd('expression (int)printf("value is: %d.\\n", value);')
# rdar://problem/9531204
# "Error dematerializing struct" error when evaluating expressions "up" on the stack
self.runCmd('up') # frame select -r 1
self.runCmd("frame variable")
# This does not currently.
self.runCmd('expression (int)printf("argc is: %d.\\n", argc)')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
<commit_before>"""
The evaluating printf(...) after break stop and then up a stack frame.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class Radar9531204TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
# rdar://problem/9531204
@expectedFailureFreeBSD('llvm.org/pr17183')
def test_expr_commands(self):
"""The evaluating printf(...) after break stop and then up a stack frame."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol (self, 'foo', sym_exact=True, num_expected_locations=1)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("frame variable")
# This works fine.
self.runCmd('expression (int)printf("value is: %d.\\n", value);')
# rdar://problem/9531204
# "Error dematerializing struct" error when evaluating expressions "up" on the stack
self.runCmd('up') # frame select -r 1
self.runCmd("frame variable")
# This does not currently.
self.runCmd('expression (int)printf("argc is: %d.\\n", argc)')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
<commit_msg>Remove decorator for now-passing test
The underlying issue was actually a Clang bug, now fixed. The test now
reports XPASS for me locally and on the buildbot.
llvm.org/pr17183 (LLDB)
llvm.org/pr18950 (Clang)
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@206761 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>"""
The evaluating printf(...) after break stop and then up a stack frame.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class Radar9531204TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
# rdar://problem/9531204
def test_expr_commands(self):
"""The evaluating printf(...) after break stop and then up a stack frame."""
self.buildDefault()
self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
lldbutil.run_break_set_by_symbol (self, 'foo', sym_exact=True, num_expected_locations=1)
self.runCmd("run", RUN_SUCCEEDED)
self.runCmd("frame variable")
# This works fine.
self.runCmd('expression (int)printf("value is: %d.\\n", value);')
# rdar://problem/9531204
# "Error dematerializing struct" error when evaluating expressions "up" on the stack
self.runCmd('up') # frame select -r 1
self.runCmd("frame variable")
# This does not currently.
self.runCmd('expression (int)printf("argc is: %d.\\n", argc)')
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
|
441f950efac0197e73fa46cf423793f28402f532
|
yaml_storage.py
|
yaml_storage.py
|
import yaml
import sys
from tinydb.storages import Storage
class YAMLStorage(Storage):
def __init__(self, filename): # (1)
self.filename = filename
def read(self):
with open(self.filename) as handle:
try:
data = yaml.safe_load(handle.read()) # (2)
return data
except yaml.YAMLError:
return None # (3)
def write(self, data):
with open(self.filename, 'w') as handle:
yaml.dump(yaml.safe_load(str(data)), handle)
def close(self): # (4)
pass
|
import yaml
import sys
from tinydb.database import Document
from tinydb.storages import Storage, touch
def represent_doc(dumper, data):
# Represent `Document` objects as their dict's string representation
# which PyYAML understands
return dumper.represent_data(dict(data))
yaml.add_representer(Document, represent_doc)
class YAMLStorage(Storage):
def __init__(self, filename):
self.filename = filename
touch(filename, False)
def read(self):
with open(self.filename) as handle:
data = yaml.safe_load(handle.read())
return data
def write(self, data):
with open(self.filename, 'w') as handle:
yaml.dump(data, handle)
def close(self):
pass
|
Fix YAMLStorage as per TinyDB doc changes
|
Fix YAMLStorage as per TinyDB doc changes
|
Python
|
mit
|
msembinelli/mpm
|
import yaml
import sys
from tinydb.storages import Storage
class YAMLStorage(Storage):
def __init__(self, filename): # (1)
self.filename = filename
def read(self):
with open(self.filename) as handle:
try:
data = yaml.safe_load(handle.read()) # (2)
return data
except yaml.YAMLError:
return None # (3)
def write(self, data):
with open(self.filename, 'w') as handle:
yaml.dump(yaml.safe_load(str(data)), handle)
def close(self): # (4)
pass
Fix YAMLStorage as per TinyDB doc changes
|
import yaml
import sys
from tinydb.database import Document
from tinydb.storages import Storage, touch
def represent_doc(dumper, data):
# Represent `Document` objects as their dict's string representation
# which PyYAML understands
return dumper.represent_data(dict(data))
yaml.add_representer(Document, represent_doc)
class YAMLStorage(Storage):
def __init__(self, filename):
self.filename = filename
touch(filename, False)
def read(self):
with open(self.filename) as handle:
data = yaml.safe_load(handle.read())
return data
def write(self, data):
with open(self.filename, 'w') as handle:
yaml.dump(data, handle)
def close(self):
pass
|
<commit_before>import yaml
import sys
from tinydb.storages import Storage
class YAMLStorage(Storage):
def __init__(self, filename): # (1)
self.filename = filename
def read(self):
with open(self.filename) as handle:
try:
data = yaml.safe_load(handle.read()) # (2)
return data
except yaml.YAMLError:
return None # (3)
def write(self, data):
with open(self.filename, 'w') as handle:
yaml.dump(yaml.safe_load(str(data)), handle)
def close(self): # (4)
pass
<commit_msg>Fix YAMLStorage as per TinyDB doc changes<commit_after>
|
import yaml
import sys
from tinydb.database import Document
from tinydb.storages import Storage, touch
def represent_doc(dumper, data):
# Represent `Document` objects as their dict's string representation
# which PyYAML understands
return dumper.represent_data(dict(data))
yaml.add_representer(Document, represent_doc)
class YAMLStorage(Storage):
def __init__(self, filename):
self.filename = filename
touch(filename, False)
def read(self):
with open(self.filename) as handle:
data = yaml.safe_load(handle.read())
return data
def write(self, data):
with open(self.filename, 'w') as handle:
yaml.dump(data, handle)
def close(self):
pass
|
import yaml
import sys
from tinydb.storages import Storage
class YAMLStorage(Storage):
def __init__(self, filename): # (1)
self.filename = filename
def read(self):
with open(self.filename) as handle:
try:
data = yaml.safe_load(handle.read()) # (2)
return data
except yaml.YAMLError:
return None # (3)
def write(self, data):
with open(self.filename, 'w') as handle:
yaml.dump(yaml.safe_load(str(data)), handle)
def close(self): # (4)
pass
Fix YAMLStorage as per TinyDB doc changesimport yaml
import sys
from tinydb.database import Document
from tinydb.storages import Storage, touch
def represent_doc(dumper, data):
# Represent `Document` objects as their dict's string representation
# which PyYAML understands
return dumper.represent_data(dict(data))
yaml.add_representer(Document, represent_doc)
class YAMLStorage(Storage):
def __init__(self, filename):
self.filename = filename
touch(filename, False)
def read(self):
with open(self.filename) as handle:
data = yaml.safe_load(handle.read())
return data
def write(self, data):
with open(self.filename, 'w') as handle:
yaml.dump(data, handle)
def close(self):
pass
|
<commit_before>import yaml
import sys
from tinydb.storages import Storage
class YAMLStorage(Storage):
def __init__(self, filename): # (1)
self.filename = filename
def read(self):
with open(self.filename) as handle:
try:
data = yaml.safe_load(handle.read()) # (2)
return data
except yaml.YAMLError:
return None # (3)
def write(self, data):
with open(self.filename, 'w') as handle:
yaml.dump(yaml.safe_load(str(data)), handle)
def close(self): # (4)
pass
<commit_msg>Fix YAMLStorage as per TinyDB doc changes<commit_after>import yaml
import sys
from tinydb.database import Document
from tinydb.storages import Storage, touch
def represent_doc(dumper, data):
# Represent `Document` objects as their dict's string representation
# which PyYAML understands
return dumper.represent_data(dict(data))
yaml.add_representer(Document, represent_doc)
class YAMLStorage(Storage):
def __init__(self, filename):
self.filename = filename
touch(filename, False)
def read(self):
with open(self.filename) as handle:
data = yaml.safe_load(handle.read())
return data
def write(self, data):
with open(self.filename, 'w') as handle:
yaml.dump(data, handle)
def close(self):
pass
|
669433c6d83917f0e3939c13dcbdc328536d9bae
|
project_fish/whats_fresh/models.py
|
project_fish/whats_fresh/models.py
|
from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='/')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
|
from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='images/')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
|
Change image upload directory to images/
|
Change image upload directory to images/
|
Python
|
apache-2.0
|
iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api,iCHAIT/whats-fresh-api,osu-cass/whats-fresh-api,iCHAIT/whats-fresh-api
|
from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='/')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
Change image upload directory to images/
|
from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='images/')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
|
<commit_before>from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='/')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
<commit_msg>Change image upload directory to images/<commit_after>
|
from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='images/')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
|
from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='/')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
Change image upload directory to images/from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='images/')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
|
<commit_before>from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='/')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
<commit_msg>Change image upload directory to images/<commit_after>from django.contrib.gis.db import models
import os
from phonenumber_field.modelfields import PhoneNumberField
class Image(models.Model):
"""
The Image model holds an image and related data.
The Created and Modified time fields are created automatically by
Django when the object is created or modified, and can not be altered.
This model uses Django's built-ins for holding the image location and
data in the database, as well as for keeping created and modified
timestamps.
"""
image = models.ImageField(upload_to='images/')
caption = models.TextField()
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Vendor(models.Model):
"""
The Vendor model holds the information for a vendor, including the
geographic location as a pair of latitudinal/logitudinal coordinates,
a street address, and an optional text description of their location
(in case the address/coordinates are of, say, a dock instead of a shop).
"""
pass
|
c544010f595412f1962fd2d12f25f5eac47bd683
|
examples/simple/basic.py
|
examples/simple/basic.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple example that shows how to setup CodeEdit.
In this example, we install a syntax highlighter mode (based on pygments), a
mode that highlights the current line and a _search and replace_ panel.
There are many other modes and panels, feel free to use this example as a
starting point to experiment.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.qt import QtWidgets
def main():
app = QtWidgets.QApplication(sys.argv)
# create editor and window
window = QtWidgets.QMainWindow()
editor = api.CodeEdit()
window.setCentralWidget(editor)
# start the backend as soon as possible
editor.backend.start('server.py')
# append some modes and panels
editor.modes.append(modes.CodeCompletionMode())
editor.modes.append(modes.PygmentsSyntaxHighlighter(editor.document()))
editor.modes.append(modes.CaretLineHighlighterMode())
editor.panels.append(panels.SearchAndReplacePanel(),
api.Panel.Position.BOTTOM)
# open a file
editor.file.open(__file__)
# run
window.show()
app.exec_()
editor.file.close()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple example that shows how to setup CodeEdit.
In this example, we install a syntax highlighter mode (based on pygments), a
mode that highlights the current line and a _search and replace_ panel.
There are many other modes and panels, feel free to use this example as a
starting point to experiment.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.qt import QtWidgets
def main():
app = QtWidgets.QApplication(sys.argv)
# create editor and window
window = QtWidgets.QMainWindow()
editor = api.CodeEdit()
window.setCentralWidget(editor)
# start the backend as soon as possible
editor.backend.start('server.py')
# append some modes and panels
editor.modes.append(modes.CodeCompletionMode())
editor.modes.append(modes.CaretLineHighlighterMode())
editor.modes.append(modes.PygmentsSyntaxHighlighter(editor.document()))
editor.panels.append(panels.SearchAndReplacePanel(),
api.Panel.Position.BOTTOM)
# open a file
editor.file.open(__file__)
# run
window.show()
app.exec_()
editor.file.close()
if __name__ == "__main__":
main()
|
Fix install order to fix wrong caret line color
|
Fix install order to fix wrong caret line color
|
Python
|
mit
|
pyQode/pyqode.core,zwadar/pyqode.core,pyQode/pyqode.core
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple example that shows how to setup CodeEdit.
In this example, we install a syntax highlighter mode (based on pygments), a
mode that highlights the current line and a _search and replace_ panel.
There are many other modes and panels, feel free to use this example as a
starting point to experiment.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.qt import QtWidgets
def main():
app = QtWidgets.QApplication(sys.argv)
# create editor and window
window = QtWidgets.QMainWindow()
editor = api.CodeEdit()
window.setCentralWidget(editor)
# start the backend as soon as possible
editor.backend.start('server.py')
# append some modes and panels
editor.modes.append(modes.CodeCompletionMode())
editor.modes.append(modes.PygmentsSyntaxHighlighter(editor.document()))
editor.modes.append(modes.CaretLineHighlighterMode())
editor.panels.append(panels.SearchAndReplacePanel(),
api.Panel.Position.BOTTOM)
# open a file
editor.file.open(__file__)
# run
window.show()
app.exec_()
editor.file.close()
if __name__ == "__main__":
main()
Fix install order to fix wrong caret line color
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple example that shows how to setup CodeEdit.
In this example, we install a syntax highlighter mode (based on pygments), a
mode that highlights the current line and a _search and replace_ panel.
There are many other modes and panels, feel free to use this example as a
starting point to experiment.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.qt import QtWidgets
def main():
app = QtWidgets.QApplication(sys.argv)
# create editor and window
window = QtWidgets.QMainWindow()
editor = api.CodeEdit()
window.setCentralWidget(editor)
# start the backend as soon as possible
editor.backend.start('server.py')
# append some modes and panels
editor.modes.append(modes.CodeCompletionMode())
editor.modes.append(modes.CaretLineHighlighterMode())
editor.modes.append(modes.PygmentsSyntaxHighlighter(editor.document()))
editor.panels.append(panels.SearchAndReplacePanel(),
api.Panel.Position.BOTTOM)
# open a file
editor.file.open(__file__)
# run
window.show()
app.exec_()
editor.file.close()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple example that shows how to setup CodeEdit.
In this example, we install a syntax highlighter mode (based on pygments), a
mode that highlights the current line and a _search and replace_ panel.
There are many other modes and panels, feel free to use this example as a
starting point to experiment.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.qt import QtWidgets
def main():
app = QtWidgets.QApplication(sys.argv)
# create editor and window
window = QtWidgets.QMainWindow()
editor = api.CodeEdit()
window.setCentralWidget(editor)
# start the backend as soon as possible
editor.backend.start('server.py')
# append some modes and panels
editor.modes.append(modes.CodeCompletionMode())
editor.modes.append(modes.PygmentsSyntaxHighlighter(editor.document()))
editor.modes.append(modes.CaretLineHighlighterMode())
editor.panels.append(panels.SearchAndReplacePanel(),
api.Panel.Position.BOTTOM)
# open a file
editor.file.open(__file__)
# run
window.show()
app.exec_()
editor.file.close()
if __name__ == "__main__":
main()
<commit_msg>Fix install order to fix wrong caret line color<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple example that shows how to setup CodeEdit.
In this example, we install a syntax highlighter mode (based on pygments), a
mode that highlights the current line and a _search and replace_ panel.
There are many other modes and panels, feel free to use this example as a
starting point to experiment.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.qt import QtWidgets
def main():
app = QtWidgets.QApplication(sys.argv)
# create editor and window
window = QtWidgets.QMainWindow()
editor = api.CodeEdit()
window.setCentralWidget(editor)
# start the backend as soon as possible
editor.backend.start('server.py')
# append some modes and panels
editor.modes.append(modes.CodeCompletionMode())
editor.modes.append(modes.CaretLineHighlighterMode())
editor.modes.append(modes.PygmentsSyntaxHighlighter(editor.document()))
editor.panels.append(panels.SearchAndReplacePanel(),
api.Panel.Position.BOTTOM)
# open a file
editor.file.open(__file__)
# run
window.show()
app.exec_()
editor.file.close()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple example that shows how to setup CodeEdit.
In this example, we install a syntax highlighter mode (based on pygments), a
mode that highlights the current line and a _search and replace_ panel.
There are many other modes and panels, feel free to use this example as a
starting point to experiment.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.qt import QtWidgets
def main():
app = QtWidgets.QApplication(sys.argv)
# create editor and window
window = QtWidgets.QMainWindow()
editor = api.CodeEdit()
window.setCentralWidget(editor)
# start the backend as soon as possible
editor.backend.start('server.py')
# append some modes and panels
editor.modes.append(modes.CodeCompletionMode())
editor.modes.append(modes.PygmentsSyntaxHighlighter(editor.document()))
editor.modes.append(modes.CaretLineHighlighterMode())
editor.panels.append(panels.SearchAndReplacePanel(),
api.Panel.Position.BOTTOM)
# open a file
editor.file.open(__file__)
# run
window.show()
app.exec_()
editor.file.close()
if __name__ == "__main__":
main()
Fix install order to fix wrong caret line color#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple example that shows how to setup CodeEdit.
In this example, we install a syntax highlighter mode (based on pygments), a
mode that highlights the current line and a _search and replace_ panel.
There are many other modes and panels, feel free to use this example as a
starting point to experiment.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.qt import QtWidgets
def main():
app = QtWidgets.QApplication(sys.argv)
# create editor and window
window = QtWidgets.QMainWindow()
editor = api.CodeEdit()
window.setCentralWidget(editor)
# start the backend as soon as possible
editor.backend.start('server.py')
# append some modes and panels
editor.modes.append(modes.CodeCompletionMode())
editor.modes.append(modes.CaretLineHighlighterMode())
editor.modes.append(modes.PygmentsSyntaxHighlighter(editor.document()))
editor.panels.append(panels.SearchAndReplacePanel(),
api.Panel.Position.BOTTOM)
# open a file
editor.file.open(__file__)
# run
window.show()
app.exec_()
editor.file.close()
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple example that shows how to setup CodeEdit.
In this example, we install a syntax highlighter mode (based on pygments), a
mode that highlights the current line and a _search and replace_ panel.
There are many other modes and panels, feel free to use this example as a
starting point to experiment.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.qt import QtWidgets
def main():
app = QtWidgets.QApplication(sys.argv)
# create editor and window
window = QtWidgets.QMainWindow()
editor = api.CodeEdit()
window.setCentralWidget(editor)
# start the backend as soon as possible
editor.backend.start('server.py')
# append some modes and panels
editor.modes.append(modes.CodeCompletionMode())
editor.modes.append(modes.PygmentsSyntaxHighlighter(editor.document()))
editor.modes.append(modes.CaretLineHighlighterMode())
editor.panels.append(panels.SearchAndReplacePanel(),
api.Panel.Position.BOTTOM)
# open a file
editor.file.open(__file__)
# run
window.show()
app.exec_()
editor.file.close()
if __name__ == "__main__":
main()
<commit_msg>Fix install order to fix wrong caret line color<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
A simple example that shows how to setup CodeEdit.
In this example, we install a syntax highlighter mode (based on pygments), a
mode that highlights the current line and a _search and replace_ panel.
There are many other modes and panels, feel free to use this example as a
starting point to experiment.
"""
import logging
logging.basicConfig(level=logging.DEBUG)
import sys
from pyqode.core import api
from pyqode.core import modes
from pyqode.core import panels
from pyqode.qt import QtWidgets
def main():
app = QtWidgets.QApplication(sys.argv)
# create editor and window
window = QtWidgets.QMainWindow()
editor = api.CodeEdit()
window.setCentralWidget(editor)
# start the backend as soon as possible
editor.backend.start('server.py')
# append some modes and panels
editor.modes.append(modes.CodeCompletionMode())
editor.modes.append(modes.CaretLineHighlighterMode())
editor.modes.append(modes.PygmentsSyntaxHighlighter(editor.document()))
editor.panels.append(panels.SearchAndReplacePanel(),
api.Panel.Position.BOTTOM)
# open a file
editor.file.open(__file__)
# run
window.show()
app.exec_()
editor.file.close()
if __name__ == "__main__":
main()
|
d7bf66d84aee271cbcd99cd91eaedea8942d5a93
|
exponent/auth/service.py
|
exponent/auth/service.py
|
from exponent.auth import common
from twisted.cred import checkers, portal
from twisted.internet import defer
from twisted.protocols import amp
from zope import interface
def _getUserByIdentifier(rootStore, userIdentifier):
"""
Gets a user by uid.
"""
user = common.User.findUnique(rootStore, userIdentifier)
return defer.succeed(user)
class AuthenticationLocator(amp.CommandLocator):
"""
A base class for responder locators that allow users to authenticate.
"""
credentialInterfaces = []
def __init__(self, store):
"""
Initializes an authentication responder locator.
:param store: The root store.
"""
self.store = store
storeCheckers = store.powerupsFor(checkers.ICredentialsChecker)
self.portal = portal.Portal(Realm(store), storeCheckers)
def acquireStore(self, userIdentifier):
"""
Acquires a user store.
"""
@interface.implementer(portal.IRealm)
class Realm(object):
"""
A realm that produces box receivers for users.
"""
def __init__(self, getUserByUid):
self._getUser = getUserByUid
def requestAvatar(self, uid, mind, *interfaces):
"""
Attempts to get a lock on the user, then adapts it to ``IBoxReceiver``.
"""
if amp.IBoxReceiver not in interfaces:
raise NotImplementedError()
return self._getUser(uid).addCallback(_gotUser)
def _gotUser(user):
"""
Adapts the user to ``IBoxReceiver`` and returns a 3-tuple suitable
as the return value for ``requestAvatar``.
"""
return amp.IBoxReceiver, amp.IBoxReceiver(user), lambda: None
|
from twisted.cred import checkers, portal
from twisted.protocols import amp
from zope import interface
class AuthenticationLocator(amp.CommandLocator):
"""
A base class for responder locators that allow users to authenticate.
"""
credentialInterfaces = []
def __init__(self, store):
"""
Initializes an authentication responder locator.
:param store: The root store.
"""
self.store = store
storeCheckers = store.powerupsFor(checkers.ICredentialsChecker)
self.portal = portal.Portal(Realm(store), storeCheckers)
def acquireStore(self, userIdentifier):
"""
Acquires a user store.
"""
@interface.implementer(portal.IRealm)
class Realm(object):
"""
A realm that produces box receivers for users.
"""
def __init__(self, getUserByUid):
self._getUser = getUserByUid
def requestAvatar(self, uid, mind, *interfaces):
"""
Attempts to get a lock on the user, then adapts it to ``IBoxReceiver``.
"""
if amp.IBoxReceiver not in interfaces:
raise NotImplementedError()
return self._getUser(uid).addCallback(_gotUser)
def _gotUser(user):
"""
Adapts the user to ``IBoxReceiver`` and returns a 3-tuple suitable
as the return value for ``requestAvatar``.
"""
return amp.IBoxReceiver, amp.IBoxReceiver(user), lambda: None
|
Remove get user by id nonsense, now exponent.auth.name
|
Remove get user by id nonsense, now exponent.auth.name
|
Python
|
isc
|
lvh/exponent
|
from exponent.auth import common
from twisted.cred import checkers, portal
from twisted.internet import defer
from twisted.protocols import amp
from zope import interface
def _getUserByIdentifier(rootStore, userIdentifier):
"""
Gets a user by uid.
"""
user = common.User.findUnique(rootStore, userIdentifier)
return defer.succeed(user)
class AuthenticationLocator(amp.CommandLocator):
"""
A base class for responder locators that allow users to authenticate.
"""
credentialInterfaces = []
def __init__(self, store):
"""
Initializes an authentication responder locator.
:param store: The root store.
"""
self.store = store
storeCheckers = store.powerupsFor(checkers.ICredentialsChecker)
self.portal = portal.Portal(Realm(store), storeCheckers)
def acquireStore(self, userIdentifier):
"""
Acquires a user store.
"""
@interface.implementer(portal.IRealm)
class Realm(object):
"""
A realm that produces box receivers for users.
"""
def __init__(self, getUserByUid):
self._getUser = getUserByUid
def requestAvatar(self, uid, mind, *interfaces):
"""
Attempts to get a lock on the user, then adapts it to ``IBoxReceiver``.
"""
if amp.IBoxReceiver not in interfaces:
raise NotImplementedError()
return self._getUser(uid).addCallback(_gotUser)
def _gotUser(user):
"""
Adapts the user to ``IBoxReceiver`` and returns a 3-tuple suitable
as the return value for ``requestAvatar``.
"""
return amp.IBoxReceiver, amp.IBoxReceiver(user), lambda: None
Remove get user by id nonsense, now exponent.auth.name
|
from twisted.cred import checkers, portal
from twisted.protocols import amp
from zope import interface
class AuthenticationLocator(amp.CommandLocator):
"""
A base class for responder locators that allow users to authenticate.
"""
credentialInterfaces = []
def __init__(self, store):
"""
Initializes an authentication responder locator.
:param store: The root store.
"""
self.store = store
storeCheckers = store.powerupsFor(checkers.ICredentialsChecker)
self.portal = portal.Portal(Realm(store), storeCheckers)
def acquireStore(self, userIdentifier):
"""
Acquires a user store.
"""
@interface.implementer(portal.IRealm)
class Realm(object):
"""
A realm that produces box receivers for users.
"""
def __init__(self, getUserByUid):
self._getUser = getUserByUid
def requestAvatar(self, uid, mind, *interfaces):
"""
Attempts to get a lock on the user, then adapts it to ``IBoxReceiver``.
"""
if amp.IBoxReceiver not in interfaces:
raise NotImplementedError()
return self._getUser(uid).addCallback(_gotUser)
def _gotUser(user):
"""
Adapts the user to ``IBoxReceiver`` and returns a 3-tuple suitable
as the return value for ``requestAvatar``.
"""
return amp.IBoxReceiver, amp.IBoxReceiver(user), lambda: None
|
<commit_before>from exponent.auth import common
from twisted.cred import checkers, portal
from twisted.internet import defer
from twisted.protocols import amp
from zope import interface
def _getUserByIdentifier(rootStore, userIdentifier):
"""
Gets a user by uid.
"""
user = common.User.findUnique(rootStore, userIdentifier)
return defer.succeed(user)
class AuthenticationLocator(amp.CommandLocator):
"""
A base class for responder locators that allow users to authenticate.
"""
credentialInterfaces = []
def __init__(self, store):
"""
Initializes an authentication responder locator.
:param store: The root store.
"""
self.store = store
storeCheckers = store.powerupsFor(checkers.ICredentialsChecker)
self.portal = portal.Portal(Realm(store), storeCheckers)
def acquireStore(self, userIdentifier):
"""
Acquires a user store.
"""
@interface.implementer(portal.IRealm)
class Realm(object):
"""
A realm that produces box receivers for users.
"""
def __init__(self, getUserByUid):
self._getUser = getUserByUid
def requestAvatar(self, uid, mind, *interfaces):
"""
Attempts to get a lock on the user, then adapts it to ``IBoxReceiver``.
"""
if amp.IBoxReceiver not in interfaces:
raise NotImplementedError()
return self._getUser(uid).addCallback(_gotUser)
def _gotUser(user):
"""
Adapts the user to ``IBoxReceiver`` and returns a 3-tuple suitable
as the return value for ``requestAvatar``.
"""
return amp.IBoxReceiver, amp.IBoxReceiver(user), lambda: None
<commit_msg>Remove get user by id nonsense, now exponent.auth.name<commit_after>
|
from twisted.cred import checkers, portal
from twisted.protocols import amp
from zope import interface
class AuthenticationLocator(amp.CommandLocator):
"""
A base class for responder locators that allow users to authenticate.
"""
credentialInterfaces = []
def __init__(self, store):
"""
Initializes an authentication responder locator.
:param store: The root store.
"""
self.store = store
storeCheckers = store.powerupsFor(checkers.ICredentialsChecker)
self.portal = portal.Portal(Realm(store), storeCheckers)
def acquireStore(self, userIdentifier):
"""
Acquires a user store.
"""
@interface.implementer(portal.IRealm)
class Realm(object):
"""
A realm that produces box receivers for users.
"""
def __init__(self, getUserByUid):
self._getUser = getUserByUid
def requestAvatar(self, uid, mind, *interfaces):
"""
Attempts to get a lock on the user, then adapts it to ``IBoxReceiver``.
"""
if amp.IBoxReceiver not in interfaces:
raise NotImplementedError()
return self._getUser(uid).addCallback(_gotUser)
def _gotUser(user):
"""
Adapts the user to ``IBoxReceiver`` and returns a 3-tuple suitable
as the return value for ``requestAvatar``.
"""
return amp.IBoxReceiver, amp.IBoxReceiver(user), lambda: None
|
from exponent.auth import common
from twisted.cred import checkers, portal
from twisted.internet import defer
from twisted.protocols import amp
from zope import interface
def _getUserByIdentifier(rootStore, userIdentifier):
"""
Gets a user by uid.
"""
user = common.User.findUnique(rootStore, userIdentifier)
return defer.succeed(user)
class AuthenticationLocator(amp.CommandLocator):
"""
A base class for responder locators that allow users to authenticate.
"""
credentialInterfaces = []
def __init__(self, store):
"""
Initializes an authentication responder locator.
:param store: The root store.
"""
self.store = store
storeCheckers = store.powerupsFor(checkers.ICredentialsChecker)
self.portal = portal.Portal(Realm(store), storeCheckers)
def acquireStore(self, userIdentifier):
"""
Acquires a user store.
"""
@interface.implementer(portal.IRealm)
class Realm(object):
"""
A realm that produces box receivers for users.
"""
def __init__(self, getUserByUid):
self._getUser = getUserByUid
def requestAvatar(self, uid, mind, *interfaces):
"""
Attempts to get a lock on the user, then adapts it to ``IBoxReceiver``.
"""
if amp.IBoxReceiver not in interfaces:
raise NotImplementedError()
return self._getUser(uid).addCallback(_gotUser)
def _gotUser(user):
"""
Adapts the user to ``IBoxReceiver`` and returns a 3-tuple suitable
as the return value for ``requestAvatar``.
"""
return amp.IBoxReceiver, amp.IBoxReceiver(user), lambda: None
Remove get user by id nonsense, now exponent.auth.namefrom twisted.cred import checkers, portal
from twisted.protocols import amp
from zope import interface
class AuthenticationLocator(amp.CommandLocator):
"""
A base class for responder locators that allow users to authenticate.
"""
credentialInterfaces = []
def __init__(self, store):
"""
Initializes an authentication responder locator.
:param store: The root store.
"""
self.store = store
storeCheckers = store.powerupsFor(checkers.ICredentialsChecker)
self.portal = portal.Portal(Realm(store), storeCheckers)
def acquireStore(self, userIdentifier):
"""
Acquires a user store.
"""
@interface.implementer(portal.IRealm)
class Realm(object):
"""
A realm that produces box receivers for users.
"""
def __init__(self, getUserByUid):
self._getUser = getUserByUid
def requestAvatar(self, uid, mind, *interfaces):
"""
Attempts to get a lock on the user, then adapts it to ``IBoxReceiver``.
"""
if amp.IBoxReceiver not in interfaces:
raise NotImplementedError()
return self._getUser(uid).addCallback(_gotUser)
def _gotUser(user):
"""
Adapts the user to ``IBoxReceiver`` and returns a 3-tuple suitable
as the return value for ``requestAvatar``.
"""
return amp.IBoxReceiver, amp.IBoxReceiver(user), lambda: None
|
<commit_before>from exponent.auth import common
from twisted.cred import checkers, portal
from twisted.internet import defer
from twisted.protocols import amp
from zope import interface
def _getUserByIdentifier(rootStore, userIdentifier):
"""
Gets a user by uid.
"""
user = common.User.findUnique(rootStore, userIdentifier)
return defer.succeed(user)
class AuthenticationLocator(amp.CommandLocator):
"""
A base class for responder locators that allow users to authenticate.
"""
credentialInterfaces = []
def __init__(self, store):
"""
Initializes an authentication responder locator.
:param store: The root store.
"""
self.store = store
storeCheckers = store.powerupsFor(checkers.ICredentialsChecker)
self.portal = portal.Portal(Realm(store), storeCheckers)
def acquireStore(self, userIdentifier):
"""
Acquires a user store.
"""
@interface.implementer(portal.IRealm)
class Realm(object):
"""
A realm that produces box receivers for users.
"""
def __init__(self, getUserByUid):
self._getUser = getUserByUid
def requestAvatar(self, uid, mind, *interfaces):
"""
Attempts to get a lock on the user, then adapts it to ``IBoxReceiver``.
"""
if amp.IBoxReceiver not in interfaces:
raise NotImplementedError()
return self._getUser(uid).addCallback(_gotUser)
def _gotUser(user):
"""
Adapts the user to ``IBoxReceiver`` and returns a 3-tuple suitable
as the return value for ``requestAvatar``.
"""
return amp.IBoxReceiver, amp.IBoxReceiver(user), lambda: None
<commit_msg>Remove get user by id nonsense, now exponent.auth.name<commit_after>from twisted.cred import checkers, portal
from twisted.protocols import amp
from zope import interface
class AuthenticationLocator(amp.CommandLocator):
"""
A base class for responder locators that allow users to authenticate.
"""
credentialInterfaces = []
def __init__(self, store):
"""
Initializes an authentication responder locator.
:param store: The root store.
"""
self.store = store
storeCheckers = store.powerupsFor(checkers.ICredentialsChecker)
self.portal = portal.Portal(Realm(store), storeCheckers)
def acquireStore(self, userIdentifier):
"""
Acquires a user store.
"""
@interface.implementer(portal.IRealm)
class Realm(object):
"""
A realm that produces box receivers for users.
"""
def __init__(self, getUserByUid):
self._getUser = getUserByUid
def requestAvatar(self, uid, mind, *interfaces):
"""
Attempts to get a lock on the user, then adapts it to ``IBoxReceiver``.
"""
if amp.IBoxReceiver not in interfaces:
raise NotImplementedError()
return self._getUser(uid).addCallback(_gotUser)
def _gotUser(user):
"""
Adapts the user to ``IBoxReceiver`` and returns a 3-tuple suitable
as the return value for ``requestAvatar``.
"""
return amp.IBoxReceiver, amp.IBoxReceiver(user), lambda: None
|
965a70bc4e8732fe02e83b9074698732253d5289
|
environ.py
|
environ.py
|
#!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
import os.path
EXTRA_SCRIPTS_DIR = '/tmp/extra-scripts'
def installerRunning():
return os.path.isdir(EXTRA_SCRIPTS_DIR)
|
#!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
import os
EXTRA_SCRIPTS_DIR = '/mnt'
def installerRunning():
return os.environ.get('XS_INSTALLATION', '0') != '0'
|
Use env variable to indicate installer
|
Use env variable to indicate installer
|
Python
|
bsd-2-clause
|
xenserver/python-libs,xenserver/python-libs
|
#!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
import os.path
EXTRA_SCRIPTS_DIR = '/tmp/extra-scripts'
def installerRunning():
return os.path.isdir(EXTRA_SCRIPTS_DIR)
Use env variable to indicate installer
|
#!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
import os
EXTRA_SCRIPTS_DIR = '/mnt'
def installerRunning():
return os.environ.get('XS_INSTALLATION', '0') != '0'
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
import os.path
EXTRA_SCRIPTS_DIR = '/tmp/extra-scripts'
def installerRunning():
return os.path.isdir(EXTRA_SCRIPTS_DIR)
<commit_msg>Use env variable to indicate installer<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
import os
EXTRA_SCRIPTS_DIR = '/mnt'
def installerRunning():
return os.environ.get('XS_INSTALLATION', '0') != '0'
|
#!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
import os.path
EXTRA_SCRIPTS_DIR = '/tmp/extra-scripts'
def installerRunning():
return os.path.isdir(EXTRA_SCRIPTS_DIR)
Use env variable to indicate installer#!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
import os
EXTRA_SCRIPTS_DIR = '/mnt'
def installerRunning():
return os.environ.get('XS_INSTALLATION', '0') != '0'
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
import os.path
EXTRA_SCRIPTS_DIR = '/tmp/extra-scripts'
def installerRunning():
return os.path.isdir(EXTRA_SCRIPTS_DIR)
<commit_msg>Use env variable to indicate installer<commit_after>#!/usr/bin/env python
# Copyright (c) 2012 Citrix Systems, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; version 2.1 only. with the special
# exception on linking described in file LICENSE.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
import os
EXTRA_SCRIPTS_DIR = '/mnt'
def installerRunning():
return os.environ.get('XS_INSTALLATION', '0') != '0'
|
ce993da55a2fdcaeaf935703f0b895cae075a870
|
example.py
|
example.py
|
#!/usr/bin/python
from ADIF_log import ADIF_log
import datetime
import os
# Create a new log...
log = ADIF_log('Py-ADIF Example')
entry = log.newEntry()
# New entry from K6BSD to WD1CKS
entry['OPerator'] = 'K6BSD'
entry['Call'] = 'WD1CKS'
entry['QSO_Date']=datetime.datetime.now().strftime('%Y%m%d')
entry['baNd']='20M'
entry['mODe']='PSK'
entry['SubMode']='PSK31'
entry['TIME_ON']=datetime.datetime.now().strftime('%H%M')
# Write to example.adif
f = open('example.adif', 'wt')
f.write(str(log))
f.close()
# Write to example.adx
f = open('example.adx', 'wt')
f.write(log.xml())
f.close()
# Read example.adif back...
newlog = ADIF_log('Py-ADIF Example', file='example.adif')
print newlog[0]['CALL'],' band: ',newlog[0]['BAND']
# Read example.adx back...
newlog = ADIF_log('Py-ADIF Example', file='example.adx')
print newlog[0]['call'],' band: ',newlog[0]['band']
# Clean up... nothing interesting here...
os.remove('example.adif')
os.remove('example.adx')
|
#!/usr/bin/python
from ADIF_log import ADIF_log
import datetime
import os
# Create a new log...
log = ADIF_log('Py-ADIF Example')
entry = log.newEntry()
# New entry from K6BSD to WD1CKS
entry['OPerator'] = 'K6BSD'
entry['Call'] = 'WD1CKS'
entry['QSO_Date']=datetime.datetime.now().strftime('%Y%m%d')
entry['baNd']='20M'
entry['mODe']='PSK'
entry['SubMode']='PSK31'
entry['TIME_ON']=datetime.datetime.now().strftime('%H%M')
entry['comment_intl']=u'Testing... \xb0'
# Write to example.adif
f = open('example.adif', 'wt')
f.write(str(log))
f.close()
# Write to example.adx
f = open('example.adx', 'wt')
f.write(log.xml())
f.close()
# Read example.adif back...
newlog = ADIF_log('Py-ADIF Example', file='example.adif')
print newlog[0]['CALL'],' band: ',newlog[0]['BAND']
# Read example.adx back...
newlog = ADIF_log('Py-ADIF Example', file='example.adx')
print newlog[0]['call'],' band: ',newlog[0]['band']
# Clean up... nothing interesting here...
os.remove('example.adif')
os.remove('example.adx')
|
Add a unicode value in a comment_intl value.
|
Add a unicode value in a comment_intl value.
|
Python
|
bsd-2-clause
|
K6BSD/Py-ADIF
|
#!/usr/bin/python
from ADIF_log import ADIF_log
import datetime
import os
# Create a new log...
log = ADIF_log('Py-ADIF Example')
entry = log.newEntry()
# New entry from K6BSD to WD1CKS
entry['OPerator'] = 'K6BSD'
entry['Call'] = 'WD1CKS'
entry['QSO_Date']=datetime.datetime.now().strftime('%Y%m%d')
entry['baNd']='20M'
entry['mODe']='PSK'
entry['SubMode']='PSK31'
entry['TIME_ON']=datetime.datetime.now().strftime('%H%M')
# Write to example.adif
f = open('example.adif', 'wt')
f.write(str(log))
f.close()
# Write to example.adx
f = open('example.adx', 'wt')
f.write(log.xml())
f.close()
# Read example.adif back...
newlog = ADIF_log('Py-ADIF Example', file='example.adif')
print newlog[0]['CALL'],' band: ',newlog[0]['BAND']
# Read example.adx back...
newlog = ADIF_log('Py-ADIF Example', file='example.adx')
print newlog[0]['call'],' band: ',newlog[0]['band']
# Clean up... nothing interesting here...
os.remove('example.adif')
os.remove('example.adx')
Add a unicode value in a comment_intl value.
|
#!/usr/bin/python
from ADIF_log import ADIF_log
import datetime
import os
# Create a new log...
log = ADIF_log('Py-ADIF Example')
entry = log.newEntry()
# New entry from K6BSD to WD1CKS
entry['OPerator'] = 'K6BSD'
entry['Call'] = 'WD1CKS'
entry['QSO_Date']=datetime.datetime.now().strftime('%Y%m%d')
entry['baNd']='20M'
entry['mODe']='PSK'
entry['SubMode']='PSK31'
entry['TIME_ON']=datetime.datetime.now().strftime('%H%M')
entry['comment_intl']=u'Testing... \xb0'
# Write to example.adif
f = open('example.adif', 'wt')
f.write(str(log))
f.close()
# Write to example.adx
f = open('example.adx', 'wt')
f.write(log.xml())
f.close()
# Read example.adif back...
newlog = ADIF_log('Py-ADIF Example', file='example.adif')
print newlog[0]['CALL'],' band: ',newlog[0]['BAND']
# Read example.adx back...
newlog = ADIF_log('Py-ADIF Example', file='example.adx')
print newlog[0]['call'],' band: ',newlog[0]['band']
# Clean up... nothing interesting here...
os.remove('example.adif')
os.remove('example.adx')
|
<commit_before>#!/usr/bin/python
from ADIF_log import ADIF_log
import datetime
import os
# Create a new log...
log = ADIF_log('Py-ADIF Example')
entry = log.newEntry()
# New entry from K6BSD to WD1CKS
entry['OPerator'] = 'K6BSD'
entry['Call'] = 'WD1CKS'
entry['QSO_Date']=datetime.datetime.now().strftime('%Y%m%d')
entry['baNd']='20M'
entry['mODe']='PSK'
entry['SubMode']='PSK31'
entry['TIME_ON']=datetime.datetime.now().strftime('%H%M')
# Write to example.adif
f = open('example.adif', 'wt')
f.write(str(log))
f.close()
# Write to example.adx
f = open('example.adx', 'wt')
f.write(log.xml())
f.close()
# Read example.adif back...
newlog = ADIF_log('Py-ADIF Example', file='example.adif')
print newlog[0]['CALL'],' band: ',newlog[0]['BAND']
# Read example.adx back...
newlog = ADIF_log('Py-ADIF Example', file='example.adx')
print newlog[0]['call'],' band: ',newlog[0]['band']
# Clean up... nothing interesting here...
os.remove('example.adif')
os.remove('example.adx')
<commit_msg>Add a unicode value in a comment_intl value.<commit_after>
|
#!/usr/bin/python
from ADIF_log import ADIF_log
import datetime
import os
# Create a new log...
log = ADIF_log('Py-ADIF Example')
entry = log.newEntry()
# New entry from K6BSD to WD1CKS
entry['OPerator'] = 'K6BSD'
entry['Call'] = 'WD1CKS'
entry['QSO_Date']=datetime.datetime.now().strftime('%Y%m%d')
entry['baNd']='20M'
entry['mODe']='PSK'
entry['SubMode']='PSK31'
entry['TIME_ON']=datetime.datetime.now().strftime('%H%M')
entry['comment_intl']=u'Testing... \xb0'
# Write to example.adif
f = open('example.adif', 'wt')
f.write(str(log))
f.close()
# Write to example.adx
f = open('example.adx', 'wt')
f.write(log.xml())
f.close()
# Read example.adif back...
newlog = ADIF_log('Py-ADIF Example', file='example.adif')
print newlog[0]['CALL'],' band: ',newlog[0]['BAND']
# Read example.adx back...
newlog = ADIF_log('Py-ADIF Example', file='example.adx')
print newlog[0]['call'],' band: ',newlog[0]['band']
# Clean up... nothing interesting here...
os.remove('example.adif')
os.remove('example.adx')
|
#!/usr/bin/python
from ADIF_log import ADIF_log
import datetime
import os
# Create a new log...
log = ADIF_log('Py-ADIF Example')
entry = log.newEntry()
# New entry from K6BSD to WD1CKS
entry['OPerator'] = 'K6BSD'
entry['Call'] = 'WD1CKS'
entry['QSO_Date']=datetime.datetime.now().strftime('%Y%m%d')
entry['baNd']='20M'
entry['mODe']='PSK'
entry['SubMode']='PSK31'
entry['TIME_ON']=datetime.datetime.now().strftime('%H%M')
# Write to example.adif
f = open('example.adif', 'wt')
f.write(str(log))
f.close()
# Write to example.adx
f = open('example.adx', 'wt')
f.write(log.xml())
f.close()
# Read example.adif back...
newlog = ADIF_log('Py-ADIF Example', file='example.adif')
print newlog[0]['CALL'],' band: ',newlog[0]['BAND']
# Read example.adx back...
newlog = ADIF_log('Py-ADIF Example', file='example.adx')
print newlog[0]['call'],' band: ',newlog[0]['band']
# Clean up... nothing interesting here...
os.remove('example.adif')
os.remove('example.adx')
Add a unicode value in a comment_intl value.#!/usr/bin/python
from ADIF_log import ADIF_log
import datetime
import os
# Create a new log...
log = ADIF_log('Py-ADIF Example')
entry = log.newEntry()
# New entry from K6BSD to WD1CKS
entry['OPerator'] = 'K6BSD'
entry['Call'] = 'WD1CKS'
entry['QSO_Date']=datetime.datetime.now().strftime('%Y%m%d')
entry['baNd']='20M'
entry['mODe']='PSK'
entry['SubMode']='PSK31'
entry['TIME_ON']=datetime.datetime.now().strftime('%H%M')
entry['comment_intl']=u'Testing... \xb0'
# Write to example.adif
f = open('example.adif', 'wt')
f.write(str(log))
f.close()
# Write to example.adx
f = open('example.adx', 'wt')
f.write(log.xml())
f.close()
# Read example.adif back...
newlog = ADIF_log('Py-ADIF Example', file='example.adif')
print newlog[0]['CALL'],' band: ',newlog[0]['BAND']
# Read example.adx back...
newlog = ADIF_log('Py-ADIF Example', file='example.adx')
print newlog[0]['call'],' band: ',newlog[0]['band']
# Clean up... nothing interesting here...
os.remove('example.adif')
os.remove('example.adx')
|
<commit_before>#!/usr/bin/python
from ADIF_log import ADIF_log
import datetime
import os
# Create a new log...
log = ADIF_log('Py-ADIF Example')
entry = log.newEntry()
# New entry from K6BSD to WD1CKS
entry['OPerator'] = 'K6BSD'
entry['Call'] = 'WD1CKS'
entry['QSO_Date']=datetime.datetime.now().strftime('%Y%m%d')
entry['baNd']='20M'
entry['mODe']='PSK'
entry['SubMode']='PSK31'
entry['TIME_ON']=datetime.datetime.now().strftime('%H%M')
# Write to example.adif
f = open('example.adif', 'wt')
f.write(str(log))
f.close()
# Write to example.adx
f = open('example.adx', 'wt')
f.write(log.xml())
f.close()
# Read example.adif back...
newlog = ADIF_log('Py-ADIF Example', file='example.adif')
print newlog[0]['CALL'],' band: ',newlog[0]['BAND']
# Read example.adx back...
newlog = ADIF_log('Py-ADIF Example', file='example.adx')
print newlog[0]['call'],' band: ',newlog[0]['band']
# Clean up... nothing interesting here...
os.remove('example.adif')
os.remove('example.adx')
<commit_msg>Add a unicode value in a comment_intl value.<commit_after>#!/usr/bin/python
from ADIF_log import ADIF_log
import datetime
import os
# Create a new log...
log = ADIF_log('Py-ADIF Example')
entry = log.newEntry()
# New entry from K6BSD to WD1CKS
entry['OPerator'] = 'K6BSD'
entry['Call'] = 'WD1CKS'
entry['QSO_Date']=datetime.datetime.now().strftime('%Y%m%d')
entry['baNd']='20M'
entry['mODe']='PSK'
entry['SubMode']='PSK31'
entry['TIME_ON']=datetime.datetime.now().strftime('%H%M')
entry['comment_intl']=u'Testing... \xb0'
# Write to example.adif
f = open('example.adif', 'wt')
f.write(str(log))
f.close()
# Write to example.adx
f = open('example.adx', 'wt')
f.write(log.xml())
f.close()
# Read example.adif back...
newlog = ADIF_log('Py-ADIF Example', file='example.adif')
print newlog[0]['CALL'],' band: ',newlog[0]['BAND']
# Read example.adx back...
newlog = ADIF_log('Py-ADIF Example', file='example.adx')
print newlog[0]['call'],' band: ',newlog[0]['band']
# Clean up... nothing interesting here...
os.remove('example.adif')
os.remove('example.adx')
|
e386b013b4c0124c623bd99dcb1a1d01b6e6bd86
|
supriya/__init__.py
|
supriya/__init__.py
|
# -*- encoding: utf -*-
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat, SampleFormat, SoundFile,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets, SupriyaConfiguration,
)
from abjad.tools.topleveltools import ( # noqa
graph, new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
# -*- encoding: utf -*-
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction,
Buffer,
BufferGroup,
Bus,
BusGroup,
Group,
Server,
Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat,
SampleFormat,
SoundFile,
play,
render,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate,
DoneAction,
Envelope,
Range,
SynthDef,
SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets,
Profiler,
SupriyaConfiguration,
)
from supriya.tools.wrappertools import ( # noqa
Say,
)
from abjad.tools.topleveltools import ( # noqa
graph,
new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
Add play, render and Say to toplevel namespace.
|
Add play, render and Say to toplevel namespace.
|
Python
|
mit
|
Pulgama/supriya,Pulgama/supriya,Pulgama/supriya,josiah-wolf-oberholtzer/supriya,Pulgama/supriya
|
# -*- encoding: utf -*-
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat, SampleFormat, SoundFile,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets, SupriyaConfiguration,
)
from abjad.tools.topleveltools import ( # noqa
graph, new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
Add play, render and Say to toplevel namespace.
|
# -*- encoding: utf -*-
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction,
Buffer,
BufferGroup,
Bus,
BusGroup,
Group,
Server,
Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat,
SampleFormat,
SoundFile,
play,
render,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate,
DoneAction,
Envelope,
Range,
SynthDef,
SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets,
Profiler,
SupriyaConfiguration,
)
from supriya.tools.wrappertools import ( # noqa
Say,
)
from abjad.tools.topleveltools import ( # noqa
graph,
new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
<commit_before># -*- encoding: utf -*-
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat, SampleFormat, SoundFile,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets, SupriyaConfiguration,
)
from abjad.tools.topleveltools import ( # noqa
graph, new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
<commit_msg>Add play, render and Say to toplevel namespace.<commit_after>
|
# -*- encoding: utf -*-
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction,
Buffer,
BufferGroup,
Bus,
BusGroup,
Group,
Server,
Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat,
SampleFormat,
SoundFile,
play,
render,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate,
DoneAction,
Envelope,
Range,
SynthDef,
SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets,
Profiler,
SupriyaConfiguration,
)
from supriya.tools.wrappertools import ( # noqa
Say,
)
from abjad.tools.topleveltools import ( # noqa
graph,
new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
# -*- encoding: utf -*-
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat, SampleFormat, SoundFile,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets, SupriyaConfiguration,
)
from abjad.tools.topleveltools import ( # noqa
graph, new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
Add play, render and Say to toplevel namespace.# -*- encoding: utf -*-
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction,
Buffer,
BufferGroup,
Bus,
BusGroup,
Group,
Server,
Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat,
SampleFormat,
SoundFile,
play,
render,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate,
DoneAction,
Envelope,
Range,
SynthDef,
SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets,
Profiler,
SupriyaConfiguration,
)
from supriya.tools.wrappertools import ( # noqa
Say,
)
from abjad.tools.topleveltools import ( # noqa
graph,
new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
<commit_before># -*- encoding: utf -*-
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction, Buffer, BufferGroup, Bus, BusGroup, Group, Server, Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat, SampleFormat, SoundFile,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate, DoneAction, Range, SynthDef, SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets, SupriyaConfiguration,
)
from abjad.tools.topleveltools import ( # noqa
graph, new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
<commit_msg>Add play, render and Say to toplevel namespace.<commit_after># -*- encoding: utf -*-
import pyximport
pyximport.install()
from supriya.tools import * # noqa
from supriya.tools.bindingtools import bind # noqa
from supriya.tools.nonrealtimetools import Session # noqa
from supriya.tools.servertools import ( # noqa
AddAction,
Buffer,
BufferGroup,
Bus,
BusGroup,
Group,
Server,
Synth,
)
from supriya.tools.soundfiletools import ( # noqa
HeaderFormat,
SampleFormat,
SoundFile,
play,
render,
)
from supriya.tools.synthdeftools import ( # noqa
CalculationRate,
DoneAction,
Envelope,
Range,
SynthDef,
SynthDefBuilder,
)
from supriya.tools.systemtools import ( # noqa
Assets,
Profiler,
SupriyaConfiguration,
)
from supriya.tools.wrappertools import ( # noqa
Say,
)
from abjad.tools.topleveltools import ( # noqa
graph,
new,
)
from supriya import synthdefs # noqa
__version__ = 0.1
supriya_configuration = SupriyaConfiguration()
del SupriyaConfiguration
|
6a5ffc12a16c0795716d1a316f051541e674fd75
|
irrigator_pro/notifications/models.py
|
irrigator_pro/notifications/models.py
|
from django.db import models
from django.contrib.auth.models import User
from common.models import Audit, Comment
from farms.models import CropSeason, Field
########################################################
### NotificationsRule
###
### Connect notification info with a Field, CropSeason
########################################################
class NotificationsRule(Comment, Audit):
# from Comment: comment
# from Audit: cdate, cuser, mdate, muser
NOTIFICATION_TYPE_VALUES = ['SMS', 'Email']
LEVEL_CHOICES = ['Daily', 'Any Flag', 'Irrigate Today', 'None']
field_list = models.ManyToManyField(Field)
recipients = models.ManyToManyField(User)
level = models.CharField(max_length = 15)
notification_type = models.CharField(max_length = 15)
|
from django.db import models
from django.contrib.auth.models import User
from common.models import Audit, Comment
from farms.models import CropSeason, Field
########################################################
### NotificationsRule
###
### Connect notification info with a Field, CropSeason
########################################################
class NotificationsRule(Comment, Audit):
# from Comment: comment
# from Audit: cdate, cuser, mdate, muser
NOTIFICATION_TYPE_VALUES = ['Email', 'SMS']
LEVEL_CHOICES = ['Daily', 'Any Flag', 'Irrigate Today', 'None']
field_list = models.ManyToManyField(Field)
recipients = models.ManyToManyField(User)
level = models.CharField(max_length = 15)
notification_type = models.CharField(max_length = 15)
|
Change order of notification types so that email is shown first and becomes the default
|
Change order of notification types so that email is shown first and becomes the default
|
Python
|
mit
|
warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro
|
from django.db import models
from django.contrib.auth.models import User
from common.models import Audit, Comment
from farms.models import CropSeason, Field
########################################################
### NotificationsRule
###
### Connect notification info with a Field, CropSeason
########################################################
class NotificationsRule(Comment, Audit):
# from Comment: comment
# from Audit: cdate, cuser, mdate, muser
NOTIFICATION_TYPE_VALUES = ['SMS', 'Email']
LEVEL_CHOICES = ['Daily', 'Any Flag', 'Irrigate Today', 'None']
field_list = models.ManyToManyField(Field)
recipients = models.ManyToManyField(User)
level = models.CharField(max_length = 15)
notification_type = models.CharField(max_length = 15)
Change order of notification types so that email is shown first and becomes the default
|
from django.db import models
from django.contrib.auth.models import User
from common.models import Audit, Comment
from farms.models import CropSeason, Field
########################################################
### NotificationsRule
###
### Connect notification info with a Field, CropSeason
########################################################
class NotificationsRule(Comment, Audit):
# from Comment: comment
# from Audit: cdate, cuser, mdate, muser
NOTIFICATION_TYPE_VALUES = ['Email', 'SMS']
LEVEL_CHOICES = ['Daily', 'Any Flag', 'Irrigate Today', 'None']
field_list = models.ManyToManyField(Field)
recipients = models.ManyToManyField(User)
level = models.CharField(max_length = 15)
notification_type = models.CharField(max_length = 15)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
from common.models import Audit, Comment
from farms.models import CropSeason, Field
########################################################
### NotificationsRule
###
### Connect notification info with a Field, CropSeason
########################################################
class NotificationsRule(Comment, Audit):
# from Comment: comment
# from Audit: cdate, cuser, mdate, muser
NOTIFICATION_TYPE_VALUES = ['SMS', 'Email']
LEVEL_CHOICES = ['Daily', 'Any Flag', 'Irrigate Today', 'None']
field_list = models.ManyToManyField(Field)
recipients = models.ManyToManyField(User)
level = models.CharField(max_length = 15)
notification_type = models.CharField(max_length = 15)
<commit_msg>Change order of notification types so that email is shown first and becomes the default<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
from common.models import Audit, Comment
from farms.models import CropSeason, Field
########################################################
### NotificationsRule
###
### Connect notification info with a Field, CropSeason
########################################################
class NotificationsRule(Comment, Audit):
# from Comment: comment
# from Audit: cdate, cuser, mdate, muser
NOTIFICATION_TYPE_VALUES = ['Email', 'SMS']
LEVEL_CHOICES = ['Daily', 'Any Flag', 'Irrigate Today', 'None']
field_list = models.ManyToManyField(Field)
recipients = models.ManyToManyField(User)
level = models.CharField(max_length = 15)
notification_type = models.CharField(max_length = 15)
|
from django.db import models
from django.contrib.auth.models import User
from common.models import Audit, Comment
from farms.models import CropSeason, Field
########################################################
### NotificationsRule
###
### Connect notification info with a Field, CropSeason
########################################################
class NotificationsRule(Comment, Audit):
# from Comment: comment
# from Audit: cdate, cuser, mdate, muser
NOTIFICATION_TYPE_VALUES = ['SMS', 'Email']
LEVEL_CHOICES = ['Daily', 'Any Flag', 'Irrigate Today', 'None']
field_list = models.ManyToManyField(Field)
recipients = models.ManyToManyField(User)
level = models.CharField(max_length = 15)
notification_type = models.CharField(max_length = 15)
Change order of notification types so that email is shown first and becomes the defaultfrom django.db import models
from django.contrib.auth.models import User
from common.models import Audit, Comment
from farms.models import CropSeason, Field
########################################################
### NotificationsRule
###
### Connect notification info with a Field, CropSeason
########################################################
class NotificationsRule(Comment, Audit):
# from Comment: comment
# from Audit: cdate, cuser, mdate, muser
NOTIFICATION_TYPE_VALUES = ['Email', 'SMS']
LEVEL_CHOICES = ['Daily', 'Any Flag', 'Irrigate Today', 'None']
field_list = models.ManyToManyField(Field)
recipients = models.ManyToManyField(User)
level = models.CharField(max_length = 15)
notification_type = models.CharField(max_length = 15)
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
from common.models import Audit, Comment
from farms.models import CropSeason, Field
########################################################
### NotificationsRule
###
### Connect notification info with a Field, CropSeason
########################################################
class NotificationsRule(Comment, Audit):
# from Comment: comment
# from Audit: cdate, cuser, mdate, muser
NOTIFICATION_TYPE_VALUES = ['SMS', 'Email']
LEVEL_CHOICES = ['Daily', 'Any Flag', 'Irrigate Today', 'None']
field_list = models.ManyToManyField(Field)
recipients = models.ManyToManyField(User)
level = models.CharField(max_length = 15)
notification_type = models.CharField(max_length = 15)
<commit_msg>Change order of notification types so that email is shown first and becomes the default<commit_after>from django.db import models
from django.contrib.auth.models import User
from common.models import Audit, Comment
from farms.models import CropSeason, Field
########################################################
### NotificationsRule
###
### Connect notification info with a Field, CropSeason
########################################################
class NotificationsRule(Comment, Audit):
# from Comment: comment
# from Audit: cdate, cuser, mdate, muser
NOTIFICATION_TYPE_VALUES = ['Email', 'SMS']
LEVEL_CHOICES = ['Daily', 'Any Flag', 'Irrigate Today', 'None']
field_list = models.ManyToManyField(Field)
recipients = models.ManyToManyField(User)
level = models.CharField(max_length = 15)
notification_type = models.CharField(max_length = 15)
|
2b479927ee33181c57081df941bfdf347cd45423
|
test/test_serenata_de_amor.py
|
test/test_serenata_de_amor.py
|
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def test_it_works(self):
self.assertEqual(4, 2 + 2)
self.assertNotEqual(5, 2 + 2)
|
import glob
import subprocess
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def setUp(self):
self.notebook_files = glob.glob('develop/*.ipynb')
def test_html_versions_present(self):
"""There is a *.html version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.html')
for filename in self.notebook_files]
html_files = glob.glob('develop/*.html')
self.assertEqual(expected, html_files)
def test_py_versions_present(self):
"""There is a *.py version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.py')
for filename in self.notebook_files]
py_files = glob.glob('develop/*.py')
self.assertEqual(expected, py_files)
|
Verify existence of *.html and *.py versions for every notebook
|
Verify existence of *.html and *.py versions for every notebook
|
Python
|
mit
|
marcusrehm/serenata-de-amor,datasciencebr/serenata-de-amor,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,datasciencebr/serenata-de-amor
|
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def test_it_works(self):
self.assertEqual(4, 2 + 2)
self.assertNotEqual(5, 2 + 2)
Verify existence of *.html and *.py versions for every notebook
|
import glob
import subprocess
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def setUp(self):
self.notebook_files = glob.glob('develop/*.ipynb')
def test_html_versions_present(self):
"""There is a *.html version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.html')
for filename in self.notebook_files]
html_files = glob.glob('develop/*.html')
self.assertEqual(expected, html_files)
def test_py_versions_present(self):
"""There is a *.py version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.py')
for filename in self.notebook_files]
py_files = glob.glob('develop/*.py')
self.assertEqual(expected, py_files)
|
<commit_before>from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def test_it_works(self):
self.assertEqual(4, 2 + 2)
self.assertNotEqual(5, 2 + 2)
<commit_msg>Verify existence of *.html and *.py versions for every notebook<commit_after>
|
import glob
import subprocess
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def setUp(self):
self.notebook_files = glob.glob('develop/*.ipynb')
def test_html_versions_present(self):
"""There is a *.html version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.html')
for filename in self.notebook_files]
html_files = glob.glob('develop/*.html')
self.assertEqual(expected, html_files)
def test_py_versions_present(self):
"""There is a *.py version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.py')
for filename in self.notebook_files]
py_files = glob.glob('develop/*.py')
self.assertEqual(expected, py_files)
|
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def test_it_works(self):
self.assertEqual(4, 2 + 2)
self.assertNotEqual(5, 2 + 2)
Verify existence of *.html and *.py versions for every notebookimport glob
import subprocess
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def setUp(self):
self.notebook_files = glob.glob('develop/*.ipynb')
def test_html_versions_present(self):
"""There is a *.html version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.html')
for filename in self.notebook_files]
html_files = glob.glob('develop/*.html')
self.assertEqual(expected, html_files)
def test_py_versions_present(self):
"""There is a *.py version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.py')
for filename in self.notebook_files]
py_files = glob.glob('develop/*.py')
self.assertEqual(expected, py_files)
|
<commit_before>from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def test_it_works(self):
self.assertEqual(4, 2 + 2)
self.assertNotEqual(5, 2 + 2)
<commit_msg>Verify existence of *.html and *.py versions for every notebook<commit_after>import glob
import subprocess
from unittest import TestCase
class TestSerenataDeAmor(TestCase):
def setUp(self):
self.notebook_files = glob.glob('develop/*.ipynb')
def test_html_versions_present(self):
"""There is a *.html version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.html')
for filename in self.notebook_files]
html_files = glob.glob('develop/*.html')
self.assertEqual(expected, html_files)
def test_py_versions_present(self):
"""There is a *.py version of every Jupyter notebook."""
expected = [filename.replace('.ipynb', '.py')
for filename in self.notebook_files]
py_files = glob.glob('develop/*.py')
self.assertEqual(expected, py_files)
|
16594fca785e0f726b9bcc3ca3c0a45d2ac758dc
|
get_study_attachments.py
|
get_study_attachments.py
|
import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
import sys
import boto3
import botocore
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
"""
Get all video responses to a study by fetching all objects in the bucket with
key name videoStream_<study_uuid>
"""
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
def get_download_url(video_key):
"""
Generate a presigned url for the video that expires in 60 seconds.
"""
s3Client = boto3.client('s3')
return s3Client.generate_presigned_url('get_object', Params = {'Bucket': BUCKET_NAME, 'Key': video_key}, ExpiresIn = 60)
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
Add function that gets presigned url for video.
|
Add function that gets presigned url for video.
|
Python
|
apache-2.0
|
pattisdr/lookit-api,CenterForOpenScience/lookit-api,pattisdr/lookit-api,CenterForOpenScience/lookit-api,pattisdr/lookit-api,CenterForOpenScience/lookit-api
|
import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
Add function that gets presigned url for video.
|
import sys
import boto3
import botocore
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
"""
Get all video responses to a study by fetching all objects in the bucket with
key name videoStream_<study_uuid>
"""
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
def get_download_url(video_key):
"""
Generate a presigned url for the video that expires in 60 seconds.
"""
s3Client = boto3.client('s3')
return s3Client.generate_presigned_url('get_object', Params = {'Bucket': BUCKET_NAME, 'Key': video_key}, ExpiresIn = 60)
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
<commit_before>import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
<commit_msg>Add function that gets presigned url for video.<commit_after>
|
import sys
import boto3
import botocore
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
"""
Get all video responses to a study by fetching all objects in the bucket with
key name videoStream_<study_uuid>
"""
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
def get_download_url(video_key):
"""
Generate a presigned url for the video that expires in 60 seconds.
"""
s3Client = boto3.client('s3')
return s3Client.generate_presigned_url('get_object', Params = {'Bucket': BUCKET_NAME, 'Key': video_key}, ExpiresIn = 60)
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
Add function that gets presigned url for video.import sys
import boto3
import botocore
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
"""
Get all video responses to a study by fetching all objects in the bucket with
key name videoStream_<study_uuid>
"""
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
def get_download_url(video_key):
"""
Generate a presigned url for the video that expires in 60 seconds.
"""
s3Client = boto3.client('s3')
return s3Client.generate_presigned_url('get_object', Params = {'Bucket': BUCKET_NAME, 'Key': video_key}, ExpiresIn = 60)
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
<commit_before>import sys
import boto3
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
<commit_msg>Add function that gets presigned url for video.<commit_after>import sys
import boto3
import botocore
BUCKET_NAME = 'mitLookit'
def get_all_study_attachments(study_uuid):
"""
Get all video responses to a study by fetching all objects in the bucket with
key name videoStream_<study_uuid>
"""
s3 = boto3.resource('s3')
bucket = s3.Bucket(BUCKET_NAME)
return bucket.objects.filter(Prefix=f'videoStream_{study_uuid}')
def get_download_url(video_key):
"""
Generate a presigned url for the video that expires in 60 seconds.
"""
s3Client = boto3.client('s3')
return s3Client.generate_presigned_url('get_object', Params = {'Bucket': BUCKET_NAME, 'Key': video_key}, ExpiresIn = 60)
if __name__ == '__main__':
study_uuid = sys.argv[1]
get_study_keys(study_uuid)
|
47bf4aa44342acc030d5cc2047d571b93b4f8de3
|
ts3npl.py
|
ts3npl.py
|
from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
self.target_channel = '#teamspeak'
@cron('* * * * *')
def fetch_status(self):
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
|
from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
config = bot.config.get('ts3npl', {})
self.target_channel = config.get('channel')
@cron('* * * * *')
def fetch_status(self):
print('checking status')
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status \
and self.target_channel:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
|
Use config instead of hardcoding target channel
|
Use config instead of hardcoding target channel
|
Python
|
mit
|
Thor77/TeamspeakIRC
|
from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
self.target_channel = '#teamspeak'
@cron('* * * * *')
def fetch_status(self):
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
Use config instead of hardcoding target channel
|
from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
config = bot.config.get('ts3npl', {})
self.target_channel = config.get('channel')
@cron('* * * * *')
def fetch_status(self):
print('checking status')
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status \
and self.target_channel:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
|
<commit_before>from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
self.target_channel = '#teamspeak'
@cron('* * * * *')
def fetch_status(self):
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
<commit_msg>Use config instead of hardcoding target channel<commit_after>
|
from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
config = bot.config.get('ts3npl', {})
self.target_channel = config.get('channel')
@cron('* * * * *')
def fetch_status(self):
print('checking status')
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status \
and self.target_channel:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
|
from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
self.target_channel = '#teamspeak'
@cron('* * * * *')
def fetch_status(self):
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
Use config instead of hardcoding target channelfrom irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
config = bot.config.get('ts3npl', {})
self.target_channel = config.get('channel')
@cron('* * * * *')
def fetch_status(self):
print('checking status')
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status \
and self.target_channel:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
|
<commit_before>from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
self.target_channel = '#teamspeak'
@cron('* * * * *')
def fetch_status(self):
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
<commit_msg>Use config instead of hardcoding target channel<commit_after>from irc3.plugins.command import command
from irc3.plugins.cron import cron
import irc3
from teamspeak_web_utils import nplstatus
@irc3.plugin
class TS3NPL(object):
def __init__(self, bot):
self.bot = bot
self.npl_status = None
config = bot.config.get('ts3npl', {})
self.target_channel = config.get('channel')
@cron('* * * * *')
def fetch_status(self):
print('checking status')
new_status = nplstatus()
if self.npl_status is not None and new_status != self.npl_status \
and self.target_channel:
if new_status:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now open!')
else:
self.bot.privmsg(self.target_channel,
'NPL-Registrations are now closed!')
self.npl_status = new_status
@command(permission='view')
def nplstatus(self, mask, target, args):
'''check Teamspeak3 NPL-Registration-status
%%nplstatus
'''
if self.npl_status is None:
self.npl_status = nplstatus()
if self.npl_status:
return 'NPL-Registrations are currently open!'
else:
return 'NPL-Registrations are currently closed!'
|
d9e922075a322cef75b7b75e62abdd539212bebd
|
twitter/util.py
|
twitter/util.py
|
"""
Internal utility functions.
`htmlentitydecode` came from here:
http://wiki.python.org/moin/EscapingHtml
"""
import re
import sys
try:
from html.entities import name2codepoint
except ImportError:
from htmlentitydefs import name2codepoint
def htmlentitydecode(s):
return re.sub(
'&(%s);' % '|'.join(name2codepoint),
lambda m: unichr(name2codepoint[m.group(1)]), s)
def smrt_input(globals_, locals_, ps1=">>> ", ps2="... "):
inputs = []
while True:
if inputs:
prompt = ps2
else:
prompt = ps1
inputs.append(input(prompt))
try:
ret = eval('\n'.join(inputs), globals_, locals_)
if ret:
print(str(ret))
return
except SyntaxError:
pass
def printNicely(string):
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(string.encode('utf8'))
print()
else:
print(string.encode('utf8'))
__all__ = ["htmlentitydecode", "smrt_input"]
|
"""
Internal utility functions.
`htmlentitydecode` came from here:
http://wiki.python.org/moin/EscapingHtml
"""
import re
import sys
try:
from html.entities import name2codepoint
unichr = chr
except ImportError:
from htmlentitydefs import name2codepoint
def htmlentitydecode(s):
return re.sub(
'&(%s);' % '|'.join(name2codepoint),
lambda m: unichr(name2codepoint[m.group(1)]), s)
def smrt_input(globals_, locals_, ps1=">>> ", ps2="... "):
inputs = []
while True:
if inputs:
prompt = ps2
else:
prompt = ps1
inputs.append(input(prompt))
try:
ret = eval('\n'.join(inputs), globals_, locals_)
if ret:
print(str(ret))
return
except SyntaxError:
pass
def printNicely(string):
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(string.encode('utf8'))
print()
else:
print(string.encode('utf8'))
__all__ = ["htmlentitydecode", "smrt_input"]
|
Set unichr = chr for Python 3.
|
Set unichr = chr for Python 3.
|
Python
|
mit
|
sixohsix/twitter,miragshin/twitter,Adai0808/twitter,adonoho/twitter,hugovk/twitter,jessamynsmith/twitter,tytek2012/twitter
|
"""
Internal utility functions.
`htmlentitydecode` came from here:
http://wiki.python.org/moin/EscapingHtml
"""
import re
import sys
try:
from html.entities import name2codepoint
except ImportError:
from htmlentitydefs import name2codepoint
def htmlentitydecode(s):
return re.sub(
'&(%s);' % '|'.join(name2codepoint),
lambda m: unichr(name2codepoint[m.group(1)]), s)
def smrt_input(globals_, locals_, ps1=">>> ", ps2="... "):
inputs = []
while True:
if inputs:
prompt = ps2
else:
prompt = ps1
inputs.append(input(prompt))
try:
ret = eval('\n'.join(inputs), globals_, locals_)
if ret:
print(str(ret))
return
except SyntaxError:
pass
def printNicely(string):
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(string.encode('utf8'))
print()
else:
print(string.encode('utf8'))
__all__ = ["htmlentitydecode", "smrt_input"]
Set unichr = chr for Python 3.
|
"""
Internal utility functions.
`htmlentitydecode` came from here:
http://wiki.python.org/moin/EscapingHtml
"""
import re
import sys
try:
from html.entities import name2codepoint
unichr = chr
except ImportError:
from htmlentitydefs import name2codepoint
def htmlentitydecode(s):
return re.sub(
'&(%s);' % '|'.join(name2codepoint),
lambda m: unichr(name2codepoint[m.group(1)]), s)
def smrt_input(globals_, locals_, ps1=">>> ", ps2="... "):
inputs = []
while True:
if inputs:
prompt = ps2
else:
prompt = ps1
inputs.append(input(prompt))
try:
ret = eval('\n'.join(inputs), globals_, locals_)
if ret:
print(str(ret))
return
except SyntaxError:
pass
def printNicely(string):
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(string.encode('utf8'))
print()
else:
print(string.encode('utf8'))
__all__ = ["htmlentitydecode", "smrt_input"]
|
<commit_before>"""
Internal utility functions.
`htmlentitydecode` came from here:
http://wiki.python.org/moin/EscapingHtml
"""
import re
import sys
try:
from html.entities import name2codepoint
except ImportError:
from htmlentitydefs import name2codepoint
def htmlentitydecode(s):
return re.sub(
'&(%s);' % '|'.join(name2codepoint),
lambda m: unichr(name2codepoint[m.group(1)]), s)
def smrt_input(globals_, locals_, ps1=">>> ", ps2="... "):
inputs = []
while True:
if inputs:
prompt = ps2
else:
prompt = ps1
inputs.append(input(prompt))
try:
ret = eval('\n'.join(inputs), globals_, locals_)
if ret:
print(str(ret))
return
except SyntaxError:
pass
def printNicely(string):
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(string.encode('utf8'))
print()
else:
print(string.encode('utf8'))
__all__ = ["htmlentitydecode", "smrt_input"]
<commit_msg>Set unichr = chr for Python 3.<commit_after>
|
"""
Internal utility functions.
`htmlentitydecode` came from here:
http://wiki.python.org/moin/EscapingHtml
"""
import re
import sys
try:
from html.entities import name2codepoint
unichr = chr
except ImportError:
from htmlentitydefs import name2codepoint
def htmlentitydecode(s):
return re.sub(
'&(%s);' % '|'.join(name2codepoint),
lambda m: unichr(name2codepoint[m.group(1)]), s)
def smrt_input(globals_, locals_, ps1=">>> ", ps2="... "):
inputs = []
while True:
if inputs:
prompt = ps2
else:
prompt = ps1
inputs.append(input(prompt))
try:
ret = eval('\n'.join(inputs), globals_, locals_)
if ret:
print(str(ret))
return
except SyntaxError:
pass
def printNicely(string):
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(string.encode('utf8'))
print()
else:
print(string.encode('utf8'))
__all__ = ["htmlentitydecode", "smrt_input"]
|
"""
Internal utility functions.
`htmlentitydecode` came from here:
http://wiki.python.org/moin/EscapingHtml
"""
import re
import sys
try:
from html.entities import name2codepoint
except ImportError:
from htmlentitydefs import name2codepoint
def htmlentitydecode(s):
return re.sub(
'&(%s);' % '|'.join(name2codepoint),
lambda m: unichr(name2codepoint[m.group(1)]), s)
def smrt_input(globals_, locals_, ps1=">>> ", ps2="... "):
inputs = []
while True:
if inputs:
prompt = ps2
else:
prompt = ps1
inputs.append(input(prompt))
try:
ret = eval('\n'.join(inputs), globals_, locals_)
if ret:
print(str(ret))
return
except SyntaxError:
pass
def printNicely(string):
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(string.encode('utf8'))
print()
else:
print(string.encode('utf8'))
__all__ = ["htmlentitydecode", "smrt_input"]
Set unichr = chr for Python 3."""
Internal utility functions.
`htmlentitydecode` came from here:
http://wiki.python.org/moin/EscapingHtml
"""
import re
import sys
try:
from html.entities import name2codepoint
unichr = chr
except ImportError:
from htmlentitydefs import name2codepoint
def htmlentitydecode(s):
return re.sub(
'&(%s);' % '|'.join(name2codepoint),
lambda m: unichr(name2codepoint[m.group(1)]), s)
def smrt_input(globals_, locals_, ps1=">>> ", ps2="... "):
inputs = []
while True:
if inputs:
prompt = ps2
else:
prompt = ps1
inputs.append(input(prompt))
try:
ret = eval('\n'.join(inputs), globals_, locals_)
if ret:
print(str(ret))
return
except SyntaxError:
pass
def printNicely(string):
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(string.encode('utf8'))
print()
else:
print(string.encode('utf8'))
__all__ = ["htmlentitydecode", "smrt_input"]
|
<commit_before>"""
Internal utility functions.
`htmlentitydecode` came from here:
http://wiki.python.org/moin/EscapingHtml
"""
import re
import sys
try:
from html.entities import name2codepoint
except ImportError:
from htmlentitydefs import name2codepoint
def htmlentitydecode(s):
return re.sub(
'&(%s);' % '|'.join(name2codepoint),
lambda m: unichr(name2codepoint[m.group(1)]), s)
def smrt_input(globals_, locals_, ps1=">>> ", ps2="... "):
inputs = []
while True:
if inputs:
prompt = ps2
else:
prompt = ps1
inputs.append(input(prompt))
try:
ret = eval('\n'.join(inputs), globals_, locals_)
if ret:
print(str(ret))
return
except SyntaxError:
pass
def printNicely(string):
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(string.encode('utf8'))
print()
else:
print(string.encode('utf8'))
__all__ = ["htmlentitydecode", "smrt_input"]
<commit_msg>Set unichr = chr for Python 3.<commit_after>"""
Internal utility functions.
`htmlentitydecode` came from here:
http://wiki.python.org/moin/EscapingHtml
"""
import re
import sys
try:
from html.entities import name2codepoint
unichr = chr
except ImportError:
from htmlentitydefs import name2codepoint
def htmlentitydecode(s):
return re.sub(
'&(%s);' % '|'.join(name2codepoint),
lambda m: unichr(name2codepoint[m.group(1)]), s)
def smrt_input(globals_, locals_, ps1=">>> ", ps2="... "):
inputs = []
while True:
if inputs:
prompt = ps2
else:
prompt = ps1
inputs.append(input(prompt))
try:
ret = eval('\n'.join(inputs), globals_, locals_)
if ret:
print(str(ret))
return
except SyntaxError:
pass
def printNicely(string):
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(string.encode('utf8'))
print()
else:
print(string.encode('utf8'))
__all__ = ["htmlentitydecode", "smrt_input"]
|
b917a5ba927a09de7e916978e3689a23ccd035ac
|
count_word_api/helpers/text.py
|
count_word_api/helpers/text.py
|
from bs4 import BeautifulSoup
def html_to_text(html_text):
"""Receive pure html and convert to text without tags"""
soup = BeautifulSoup(html_text, "html.parser")
clean_html = ' '.join(soup.find_all(text=True))
return clean_html
def remove_punctuation(text):
translate_table = {ord(c): ""
for c in "!@#$%^&*()[]{}';:,./<>?\|`~-=_+\""}
return text.translate(translate_table)
|
from bs4 import BeautifulSoup
def html_to_text(html_text):
"""Receive pure html and convert to text without tags"""
soup = BeautifulSoup(html_text, "html.parser")
clean_html = ' '.join(soup.find_all(text=True))
return clean_html
def remove_punctuation(text):
"""Receive text and remove punctuation"""
translate_table = {ord(c): ""
for c in "!@#$%^&*()[]{}';:,./<>?\|`~-=_+\""}
return text.translate(translate_table)
|
Add missing docstring on remove_punctuation
|
Add missing docstring on remove_punctuation
|
Python
|
mit
|
rafaelhenrique/count_word_api
|
from bs4 import BeautifulSoup
def html_to_text(html_text):
"""Receive pure html and convert to text without tags"""
soup = BeautifulSoup(html_text, "html.parser")
clean_html = ' '.join(soup.find_all(text=True))
return clean_html
def remove_punctuation(text):
translate_table = {ord(c): ""
for c in "!@#$%^&*()[]{}';:,./<>?\|`~-=_+\""}
return text.translate(translate_table)
Add missing docstring on remove_punctuation
|
from bs4 import BeautifulSoup
def html_to_text(html_text):
"""Receive pure html and convert to text without tags"""
soup = BeautifulSoup(html_text, "html.parser")
clean_html = ' '.join(soup.find_all(text=True))
return clean_html
def remove_punctuation(text):
"""Receive text and remove punctuation"""
translate_table = {ord(c): ""
for c in "!@#$%^&*()[]{}';:,./<>?\|`~-=_+\""}
return text.translate(translate_table)
|
<commit_before>from bs4 import BeautifulSoup
def html_to_text(html_text):
"""Receive pure html and convert to text without tags"""
soup = BeautifulSoup(html_text, "html.parser")
clean_html = ' '.join(soup.find_all(text=True))
return clean_html
def remove_punctuation(text):
translate_table = {ord(c): ""
for c in "!@#$%^&*()[]{}';:,./<>?\|`~-=_+\""}
return text.translate(translate_table)
<commit_msg>Add missing docstring on remove_punctuation<commit_after>
|
from bs4 import BeautifulSoup
def html_to_text(html_text):
"""Receive pure html and convert to text without tags"""
soup = BeautifulSoup(html_text, "html.parser")
clean_html = ' '.join(soup.find_all(text=True))
return clean_html
def remove_punctuation(text):
"""Receive text and remove punctuation"""
translate_table = {ord(c): ""
for c in "!@#$%^&*()[]{}';:,./<>?\|`~-=_+\""}
return text.translate(translate_table)
|
from bs4 import BeautifulSoup
def html_to_text(html_text):
"""Receive pure html and convert to text without tags"""
soup = BeautifulSoup(html_text, "html.parser")
clean_html = ' '.join(soup.find_all(text=True))
return clean_html
def remove_punctuation(text):
translate_table = {ord(c): ""
for c in "!@#$%^&*()[]{}';:,./<>?\|`~-=_+\""}
return text.translate(translate_table)
Add missing docstring on remove_punctuationfrom bs4 import BeautifulSoup
def html_to_text(html_text):
"""Receive pure html and convert to text without tags"""
soup = BeautifulSoup(html_text, "html.parser")
clean_html = ' '.join(soup.find_all(text=True))
return clean_html
def remove_punctuation(text):
"""Receive text and remove punctuation"""
translate_table = {ord(c): ""
for c in "!@#$%^&*()[]{}';:,./<>?\|`~-=_+\""}
return text.translate(translate_table)
|
<commit_before>from bs4 import BeautifulSoup
def html_to_text(html_text):
"""Receive pure html and convert to text without tags"""
soup = BeautifulSoup(html_text, "html.parser")
clean_html = ' '.join(soup.find_all(text=True))
return clean_html
def remove_punctuation(text):
translate_table = {ord(c): ""
for c in "!@#$%^&*()[]{}';:,./<>?\|`~-=_+\""}
return text.translate(translate_table)
<commit_msg>Add missing docstring on remove_punctuation<commit_after>from bs4 import BeautifulSoup
def html_to_text(html_text):
"""Receive pure html and convert to text without tags"""
soup = BeautifulSoup(html_text, "html.parser")
clean_html = ' '.join(soup.find_all(text=True))
return clean_html
def remove_punctuation(text):
"""Receive text and remove punctuation"""
translate_table = {ord(c): ""
for c in "!@#$%^&*()[]{}';:,./<>?\|`~-=_+\""}
return text.translate(translate_table)
|
a09822a4a8422493c5cb98eb9518ab0112c565d7
|
techism2/service.py
|
techism2/service.py
|
from techism2.models import Event
from datetime import datetime
def get_tags():
# TODO: cache, use django cache which uses GAE memcache
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
from techism2.models import Event
from datetime import datetime
from django.core.cache import cache
tags_cache_key = "tags"
def get_tags():
# Note: no synchronization, propably not possible on GAE
tags = cache.get(tags_cache_key)
if tags:
return tags
else:
tags = __fetch_tags()
cache.set(tags_cache_key, tags, 1800) # expire after 30 min
return tags
def __fetch_tags():
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
Use Memcache to cache tags
|
Use Memcache to cache tags
|
Python
|
apache-2.0
|
gimler/techism2,gimler/techism2
|
from techism2.models import Event
from datetime import datetime
def get_tags():
# TODO: cache, use django cache which uses GAE memcache
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
Use Memcache to cache tags
|
from techism2.models import Event
from datetime import datetime
from django.core.cache import cache
tags_cache_key = "tags"
def get_tags():
# Note: no synchronization, propably not possible on GAE
tags = cache.get(tags_cache_key)
if tags:
return tags
else:
tags = __fetch_tags()
cache.set(tags_cache_key, tags, 1800) # expire after 30 min
return tags
def __fetch_tags():
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
<commit_before>from techism2.models import Event
from datetime import datetime
def get_tags():
# TODO: cache, use django cache which uses GAE memcache
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
<commit_msg>Use Memcache to cache tags<commit_after>
|
from techism2.models import Event
from datetime import datetime
from django.core.cache import cache
tags_cache_key = "tags"
def get_tags():
# Note: no synchronization, propably not possible on GAE
tags = cache.get(tags_cache_key)
if tags:
return tags
else:
tags = __fetch_tags()
cache.set(tags_cache_key, tags, 1800) # expire after 30 min
return tags
def __fetch_tags():
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
from techism2.models import Event
from datetime import datetime
def get_tags():
# TODO: cache, use django cache which uses GAE memcache
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
Use Memcache to cache tagsfrom techism2.models import Event
from datetime import datetime
from django.core.cache import cache
tags_cache_key = "tags"
def get_tags():
# Note: no synchronization, propably not possible on GAE
tags = cache.get(tags_cache_key)
if tags:
return tags
else:
tags = __fetch_tags()
cache.set(tags_cache_key, tags, 1800) # expire after 30 min
return tags
def __fetch_tags():
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
<commit_before>from techism2.models import Event
from datetime import datetime
def get_tags():
# TODO: cache, use django cache which uses GAE memcache
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
<commit_msg>Use Memcache to cache tags<commit_after>from techism2.models import Event
from datetime import datetime
from django.core.cache import cache
tags_cache_key = "tags"
def get_tags():
# Note: no synchronization, propably not possible on GAE
tags = cache.get(tags_cache_key)
if tags:
return tags
else:
tags = __fetch_tags()
cache.set(tags_cache_key, tags, 1800) # expire after 30 min
return tags
def __fetch_tags():
dict_list = Event.objects.values('tags')
tags = dict()
for dictionary in dict_list:
for tag_list in dictionary.itervalues():
if tag_list:
for tag in tag_list:
if tag not in tags:
tags[tag] = 0
tags[tag] += 1
return tags
|
37791c55a6e7e27913498e37a9af8098fe1c6aaa
|
tools/build_interface_docs.py
|
tools/build_interface_docs.py
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'\.testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
Remove testing dir from interface doc generation.
|
Remove testing dir from interface doc generation.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1390 ead46cd0-7350-4e37-8683-fc4c6f79bf00
|
Python
|
bsd-3-clause
|
mick-d/nipype,FCP-INDI/nipype,glatard/nipype,FCP-INDI/nipype,gerddie/nipype,satra/NiPypeold,pearsonlab/nipype,dmordom/nipype,pearsonlab/nipype,fprados/nipype,JohnGriffiths/nipype,blakedewey/nipype,glatard/nipype,gerddie/nipype,dgellis90/nipype,sgiavasis/nipype,arokem/nipype,rameshvs/nipype,blakedewey/nipype,dgellis90/nipype,carolFrohlich/nipype,carolFrohlich/nipype,dgellis90/nipype,arokem/nipype,blakedewey/nipype,dmordom/nipype,Leoniela/nipype,JohnGriffiths/nipype,dgellis90/nipype,carlohamalainen/nipype,arokem/nipype,gerddie/nipype,mick-d/nipype_source,FCP-INDI/nipype,grlee77/nipype,glatard/nipype,sgiavasis/nipype,iglpdc/nipype,pearsonlab/nipype,FredLoney/nipype,grlee77/nipype,FredLoney/nipype,FredLoney/nipype,pearsonlab/nipype,carlohamalainen/nipype,blakedewey/nipype,iglpdc/nipype,glatard/nipype,JohnGriffiths/nipype,arokem/nipype,fprados/nipype,sgiavasis/nipype,rameshvs/nipype,iglpdc/nipype,Leoniela/nipype,mick-d/nipype_source,mick-d/nipype,carlohamalainen/nipype,iglpdc/nipype,mick-d/nipype,carolFrohlich/nipype,carolFrohlich/nipype,christianbrodbeck/nipype,rameshvs/nipype,wanderine/nipype,wanderine/nipype,satra/NiPypeold,mick-d/nipype,rameshvs/nipype,sgiavasis/nipype,grlee77/nipype,Leoniela/nipype,JohnGriffiths/nipype,mick-d/nipype_source,dmordom/nipype,FCP-INDI/nipype,fprados/nipype,grlee77/nipype,christianbrodbeck/nipype,gerddie/nipype,wanderine/nipype,wanderine/nipype
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
Remove testing dir from interface doc generation.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1390 ead46cd0-7350-4e37-8683-fc4c6f79bf00
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'\.testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
<commit_before>#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
<commit_msg>Remove testing dir from interface doc generation.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1390 ead46cd0-7350-4e37-8683-fc4c6f79bf00<commit_after>
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'\.testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
Remove testing dir from interface doc generation.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1390 ead46cd0-7350-4e37-8683-fc4c6f79bf00#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'\.testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
<commit_before>#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'.\testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
<commit_msg>Remove testing dir from interface doc generation.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1390 ead46cd0-7350-4e37-8683-fc4c6f79bf00<commit_after>#!/usr/bin/env python
"""Script to auto-generate interface docs.
"""
# stdlib imports
import os
import sys
#*****************************************************************************
if __name__ == '__main__':
nipypepath = os.path.abspath('..')
sys.path.insert(1,nipypepath)
# local imports
from interfacedocgen import InterfaceHelpWriter
package = 'nipype'
outdir = os.path.join('interfaces','generated')
docwriter = InterfaceHelpWriter(package)
# Packages that should not be included in generated API docs.
docwriter.package_skip_patterns += ['\.externals$',
'\.utils$',
'\.pipeline',
'\.testing',
]
# Modules that should not be included in generated API docs.
docwriter.module_skip_patterns += ['\.version$',
'\.interfaces\.afni$',
'\.interfaces\.base$',
'\.interfaces\.matlab$',
'\.interfaces\.rest$',
'\.interfaces\.pymvpa$',
'\.interfaces\.traits',
'\.pipeline\.alloy$',
'\.pipeline\.s3_node_wrapper$',
'.\testing',
]
docwriter.class_skip_patterns += ['FSL',
'FS',
'Spm',
'Tester',
'Spec$',
'afni',
'Numpy'
# NipypeTester raises an
# exception when instantiated in
# InterfaceHelpWriter.generate_api_doc
'NipypeTester',
]
docwriter.write_api_docs(outdir)
docwriter.write_index(outdir, 'gen', relative_to='interfaces')
print '%d files written' % len(docwriter.written_modules)
|
523ac69174cf3a02c3054dfbe04dc461cc8d60ff
|
src/ggrc_basic_permissions/migrations/versions/20150805105543_99925466d6e_add_roles_order_column.py
|
src/ggrc_basic_permissions/migrations/versions/20150805105543_99925466d6e_add_roles_order_column.py
|
"""Add roles order column
Revision ID: 99925466d6e
Revises: 401fb7f0184b
Create Date: 2015-08-05 10:55:43.992382
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99925466d6e'
down_revision = '401fb7f0184b'
def upgrade():
op.add_column("roles", sa.Column("role_order", sa.Integer(), nullable=True))
op.execute("UPDATE roles SET role_order = id")
# creator role should appear before other roles
op.execute("UPDATE roles SET role_order = 4 WHERE name='Creator'")
def downgrade():
op.drop_column("roles", "role_order")
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""Add roles order column
Revision ID: 99925466d6e
Revises: 401fb7f0184b
Create Date: 2015-08-05 10:55:43.992382
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99925466d6e'
down_revision = '401fb7f0184b'
def upgrade():
op.add_column("roles", sa.Column("role_order", sa.Integer(), nullable=True))
op.execute("UPDATE roles SET role_order = id")
# creator role should appear before other roles
op.execute("UPDATE roles SET role_order = 4 WHERE name='Creator'")
def downgrade():
op.drop_column("roles", "role_order")
|
Add headers to migrations in ggrc_basic_permissions
|
Add headers to migrations in ggrc_basic_permissions
|
Python
|
apache-2.0
|
plamut/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core
|
"""Add roles order column
Revision ID: 99925466d6e
Revises: 401fb7f0184b
Create Date: 2015-08-05 10:55:43.992382
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99925466d6e'
down_revision = '401fb7f0184b'
def upgrade():
op.add_column("roles", sa.Column("role_order", sa.Integer(), nullable=True))
op.execute("UPDATE roles SET role_order = id")
# creator role should appear before other roles
op.execute("UPDATE roles SET role_order = 4 WHERE name='Creator'")
def downgrade():
op.drop_column("roles", "role_order")
Add headers to migrations in ggrc_basic_permissions
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""Add roles order column
Revision ID: 99925466d6e
Revises: 401fb7f0184b
Create Date: 2015-08-05 10:55:43.992382
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99925466d6e'
down_revision = '401fb7f0184b'
def upgrade():
op.add_column("roles", sa.Column("role_order", sa.Integer(), nullable=True))
op.execute("UPDATE roles SET role_order = id")
# creator role should appear before other roles
op.execute("UPDATE roles SET role_order = 4 WHERE name='Creator'")
def downgrade():
op.drop_column("roles", "role_order")
|
<commit_before>
"""Add roles order column
Revision ID: 99925466d6e
Revises: 401fb7f0184b
Create Date: 2015-08-05 10:55:43.992382
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99925466d6e'
down_revision = '401fb7f0184b'
def upgrade():
op.add_column("roles", sa.Column("role_order", sa.Integer(), nullable=True))
op.execute("UPDATE roles SET role_order = id")
# creator role should appear before other roles
op.execute("UPDATE roles SET role_order = 4 WHERE name='Creator'")
def downgrade():
op.drop_column("roles", "role_order")
<commit_msg>Add headers to migrations in ggrc_basic_permissions<commit_after>
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""Add roles order column
Revision ID: 99925466d6e
Revises: 401fb7f0184b
Create Date: 2015-08-05 10:55:43.992382
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99925466d6e'
down_revision = '401fb7f0184b'
def upgrade():
op.add_column("roles", sa.Column("role_order", sa.Integer(), nullable=True))
op.execute("UPDATE roles SET role_order = id")
# creator role should appear before other roles
op.execute("UPDATE roles SET role_order = 4 WHERE name='Creator'")
def downgrade():
op.drop_column("roles", "role_order")
|
"""Add roles order column
Revision ID: 99925466d6e
Revises: 401fb7f0184b
Create Date: 2015-08-05 10:55:43.992382
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99925466d6e'
down_revision = '401fb7f0184b'
def upgrade():
op.add_column("roles", sa.Column("role_order", sa.Integer(), nullable=True))
op.execute("UPDATE roles SET role_order = id")
# creator role should appear before other roles
op.execute("UPDATE roles SET role_order = 4 WHERE name='Creator'")
def downgrade():
op.drop_column("roles", "role_order")
Add headers to migrations in ggrc_basic_permissions# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""Add roles order column
Revision ID: 99925466d6e
Revises: 401fb7f0184b
Create Date: 2015-08-05 10:55:43.992382
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99925466d6e'
down_revision = '401fb7f0184b'
def upgrade():
op.add_column("roles", sa.Column("role_order", sa.Integer(), nullable=True))
op.execute("UPDATE roles SET role_order = id")
# creator role should appear before other roles
op.execute("UPDATE roles SET role_order = 4 WHERE name='Creator'")
def downgrade():
op.drop_column("roles", "role_order")
|
<commit_before>
"""Add roles order column
Revision ID: 99925466d6e
Revises: 401fb7f0184b
Create Date: 2015-08-05 10:55:43.992382
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99925466d6e'
down_revision = '401fb7f0184b'
def upgrade():
op.add_column("roles", sa.Column("role_order", sa.Integer(), nullable=True))
op.execute("UPDATE roles SET role_order = id")
# creator role should appear before other roles
op.execute("UPDATE roles SET role_order = 4 WHERE name='Creator'")
def downgrade():
op.drop_column("roles", "role_order")
<commit_msg>Add headers to migrations in ggrc_basic_permissions<commit_after># Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
"""Add roles order column
Revision ID: 99925466d6e
Revises: 401fb7f0184b
Create Date: 2015-08-05 10:55:43.992382
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '99925466d6e'
down_revision = '401fb7f0184b'
def upgrade():
op.add_column("roles", sa.Column("role_order", sa.Integer(), nullable=True))
op.execute("UPDATE roles SET role_order = id")
# creator role should appear before other roles
op.execute("UPDATE roles SET role_order = 4 WHERE name='Creator'")
def downgrade():
op.drop_column("roles", "role_order")
|
5121a036b12a1e4b7eafb21cfbcf8c5cb39d9803
|
script.py
|
script.py
|
import numpy as np
import pandas
def main():
data = pandas.read_csv('sarcasm_v2.csv').as_matrix()
# print(data.shape)
data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
# print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
return {
'GEN': 0, # general
'HYP': 1, # hyperbole
'RQ': 2 # rhetorical question
}[category]
def sarcasm(sarcasm_value):
return {
'sarc': 1, # true for sarcasm
'notsarc': 0 # false for sarcasm
}[sarcasm_value]
def get_data_index(ID):
'''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.
'''
# TODO
if __name__ == '__main__':
main()
|
import numpy as np
import pandas
def main():
data = pandas.read_csv('sarcasm_v2.csv').as_matrix()
# print(data.shape)
data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
# print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
return {
'GEN': 0, # general
'HYP': 1, # hyperbole
'RQ': 2 # rhetorical question
}[category]
def sarcasm(sarcasm_value):
return {
'sarc': 1, # true for sarcasm
'notsarc': 0 # false for sarcasm
}[sarcasm_value]
def get_data_index(ID):
'''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.
'''
# TODO: given a string as shown in the comment, extract the number in it, possibly with regex.
if __name__ == '__main__':
main()
|
Add TODO comment for get_data_index
|
Add TODO comment for get_data_index
|
Python
|
mit
|
liuisaiah/Hack-Brown2017,LWprogramming/Hack-Brown2017
|
import numpy as np
import pandas
def main():
data = pandas.read_csv('sarcasm_v2.csv').as_matrix()
# print(data.shape)
data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
# print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
return {
'GEN': 0, # general
'HYP': 1, # hyperbole
'RQ': 2 # rhetorical question
}[category]
def sarcasm(sarcasm_value):
return {
'sarc': 1, # true for sarcasm
'notsarc': 0 # false for sarcasm
}[sarcasm_value]
def get_data_index(ID):
'''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.
'''
# TODO
if __name__ == '__main__':
main()
Add TODO comment for get_data_index
|
import numpy as np
import pandas
def main():
data = pandas.read_csv('sarcasm_v2.csv').as_matrix()
# print(data.shape)
data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
# print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
return {
'GEN': 0, # general
'HYP': 1, # hyperbole
'RQ': 2 # rhetorical question
}[category]
def sarcasm(sarcasm_value):
return {
'sarc': 1, # true for sarcasm
'notsarc': 0 # false for sarcasm
}[sarcasm_value]
def get_data_index(ID):
'''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.
'''
# TODO: given a string as shown in the comment, extract the number in it, possibly with regex.
if __name__ == '__main__':
main()
|
<commit_before>import numpy as np
import pandas
def main():
data = pandas.read_csv('sarcasm_v2.csv').as_matrix()
# print(data.shape)
data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
# print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
return {
'GEN': 0, # general
'HYP': 1, # hyperbole
'RQ': 2 # rhetorical question
}[category]
def sarcasm(sarcasm_value):
return {
'sarc': 1, # true for sarcasm
'notsarc': 0 # false for sarcasm
}[sarcasm_value]
def get_data_index(ID):
'''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.
'''
# TODO
if __name__ == '__main__':
main()
<commit_msg>Add TODO comment for get_data_index<commit_after>
|
import numpy as np
import pandas
def main():
data = pandas.read_csv('sarcasm_v2.csv').as_matrix()
# print(data.shape)
data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
# print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
return {
'GEN': 0, # general
'HYP': 1, # hyperbole
'RQ': 2 # rhetorical question
}[category]
def sarcasm(sarcasm_value):
return {
'sarc': 1, # true for sarcasm
'notsarc': 0 # false for sarcasm
}[sarcasm_value]
def get_data_index(ID):
'''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.
'''
# TODO: given a string as shown in the comment, extract the number in it, possibly with regex.
if __name__ == '__main__':
main()
|
import numpy as np
import pandas
def main():
data = pandas.read_csv('sarcasm_v2.csv').as_matrix()
# print(data.shape)
data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
# print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
return {
'GEN': 0, # general
'HYP': 1, # hyperbole
'RQ': 2 # rhetorical question
}[category]
def sarcasm(sarcasm_value):
return {
'sarc': 1, # true for sarcasm
'notsarc': 0 # false for sarcasm
}[sarcasm_value]
def get_data_index(ID):
'''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.
'''
# TODO
if __name__ == '__main__':
main()
Add TODO comment for get_data_indeximport numpy as np
import pandas
def main():
data = pandas.read_csv('sarcasm_v2.csv').as_matrix()
# print(data.shape)
data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
# print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
return {
'GEN': 0, # general
'HYP': 1, # hyperbole
'RQ': 2 # rhetorical question
}[category]
def sarcasm(sarcasm_value):
return {
'sarc': 1, # true for sarcasm
'notsarc': 0 # false for sarcasm
}[sarcasm_value]
def get_data_index(ID):
'''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.
'''
# TODO: given a string as shown in the comment, extract the number in it, possibly with regex.
if __name__ == '__main__':
main()
|
<commit_before>import numpy as np
import pandas
def main():
data = pandas.read_csv('sarcasm_v2.csv').as_matrix()
# print(data.shape)
data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
# print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
return {
'GEN': 0, # general
'HYP': 1, # hyperbole
'RQ': 2 # rhetorical question
}[category]
def sarcasm(sarcasm_value):
return {
'sarc': 1, # true for sarcasm
'notsarc': 0 # false for sarcasm
}[sarcasm_value]
def get_data_index(ID):
'''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.
'''
# TODO
if __name__ == '__main__':
main()
<commit_msg>Add TODO comment for get_data_index<commit_after>import numpy as np
import pandas
def main():
data = pandas.read_csv('sarcasm_v2.csv').as_matrix()
# print(data.shape)
data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
# print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
return {
'GEN': 0, # general
'HYP': 1, # hyperbole
'RQ': 2 # rhetorical question
}[category]
def sarcasm(sarcasm_value):
return {
'sarc': 1, # true for sarcasm
'notsarc': 0 # false for sarcasm
}[sarcasm_value]
def get_data_index(ID):
'''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.
'''
# TODO: given a string as shown in the comment, extract the number in it, possibly with regex.
if __name__ == '__main__':
main()
|
b7f04943617d45ee188869b7e2e037c63e10ae6b
|
main.py
|
main.py
|
#!/usr/bin/python
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
sys.exit(1)
if not os.path.exists(config.get('base', 'cache_dir')):
os.makedirs(config.get('base', 'cache_dir'))
import db
from web import app
db.init_db()
app.run(host = '0.0.0.0', debug = True)
|
#!/usr/bin/python
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
sys.exit(1)
if not os.path.exists(config.get('base', 'cache_dir')):
os.makedirs(config.get('base', 'cache_dir'))
import db
from web import app
db.init_db()
app.run(host = sys.argv[1] if len(sys.argv) > 1 else None, debug = True)
|
Allow choosing which address to listen on from command-line
|
Allow choosing which address to listen on from command-line
Closes #12
|
Python
|
agpl-3.0
|
hhm0/supysonic,nwokeo/supysonic,spl0k/supysonic,spl0k/supysonic,nwokeo/supysonic,ezpuzz/supysonic,spl0k/supysonic,nwokeo/supysonic,nwokeo/supysonic,hhm0/supysonic,hhm0/supysonic,nwokeo/supysonic,ezpuzz/supysonic
|
#!/usr/bin/python
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
sys.exit(1)
if not os.path.exists(config.get('base', 'cache_dir')):
os.makedirs(config.get('base', 'cache_dir'))
import db
from web import app
db.init_db()
app.run(host = '0.0.0.0', debug = True)
Allow choosing which address to listen on from command-line
Closes #12
|
#!/usr/bin/python
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
sys.exit(1)
if not os.path.exists(config.get('base', 'cache_dir')):
os.makedirs(config.get('base', 'cache_dir'))
import db
from web import app
db.init_db()
app.run(host = sys.argv[1] if len(sys.argv) > 1 else None, debug = True)
|
<commit_before>#!/usr/bin/python
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
sys.exit(1)
if not os.path.exists(config.get('base', 'cache_dir')):
os.makedirs(config.get('base', 'cache_dir'))
import db
from web import app
db.init_db()
app.run(host = '0.0.0.0', debug = True)
<commit_msg>Allow choosing which address to listen on from command-line
Closes #12<commit_after>
|
#!/usr/bin/python
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
sys.exit(1)
if not os.path.exists(config.get('base', 'cache_dir')):
os.makedirs(config.get('base', 'cache_dir'))
import db
from web import app
db.init_db()
app.run(host = sys.argv[1] if len(sys.argv) > 1 else None, debug = True)
|
#!/usr/bin/python
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
sys.exit(1)
if not os.path.exists(config.get('base', 'cache_dir')):
os.makedirs(config.get('base', 'cache_dir'))
import db
from web import app
db.init_db()
app.run(host = '0.0.0.0', debug = True)
Allow choosing which address to listen on from command-line
Closes #12#!/usr/bin/python
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
sys.exit(1)
if not os.path.exists(config.get('base', 'cache_dir')):
os.makedirs(config.get('base', 'cache_dir'))
import db
from web import app
db.init_db()
app.run(host = sys.argv[1] if len(sys.argv) > 1 else None, debug = True)
|
<commit_before>#!/usr/bin/python
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
sys.exit(1)
if not os.path.exists(config.get('base', 'cache_dir')):
os.makedirs(config.get('base', 'cache_dir'))
import db
from web import app
db.init_db()
app.run(host = '0.0.0.0', debug = True)
<commit_msg>Allow choosing which address to listen on from command-line
Closes #12<commit_after>#!/usr/bin/python
# coding: utf-8
# This file is part of Supysonic.
#
# Supysonic is a Python implementation of the Subsonic server API.
# Copyright (C) 2013 Alban 'spl0k' Féron
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import config
import os.path, sys
if __name__ == '__main__':
if not config.check():
sys.exit(1)
if not os.path.exists(config.get('base', 'cache_dir')):
os.makedirs(config.get('base', 'cache_dir'))
import db
from web import app
db.init_db()
app.run(host = sys.argv[1] if len(sys.argv) > 1 else None, debug = True)
|
07ba597a106e60a77ec28debd093079daa55df8f
|
node.py
|
node.py
|
class Node(object):
def __init__(self):
# Node(s) from which this Node receives values
self.inbound_nodes = inbound_nodes
# Node(s) to which this Node passes values
self.outbound_nodes = []
# For each inbound Node here, add this Node as an outbound to that Node.
for n in self.inbound_nodes:
n.outbound_nodes.append(self)
|
class Node(object):
def __init__(self):
# Node(s) from which this Node receives values
self.inbound_nodes = inbound_nodes
# Node(s) to which this Node passes values
self.outbound_nodes = []
# For each inbound Node here, add this Node as an outbound to that Node.
for n in self.inbound_nodes:
n.outbound_nodes.append(self)
# A calculated value
self.value = None
|
Add value property to class Node
|
Add value property to class Node
value is the calculated output of a Node.
|
Python
|
mit
|
YabinHu/miniflow
|
class Node(object):
def __init__(self):
# Node(s) from which this Node receives values
self.inbound_nodes = inbound_nodes
# Node(s) to which this Node passes values
self.outbound_nodes = []
# For each inbound Node here, add this Node as an outbound to that Node.
for n in self.inbound_nodes:
n.outbound_nodes.append(self)
Add value property to class Node
value is the calculated output of a Node.
|
class Node(object):
def __init__(self):
# Node(s) from which this Node receives values
self.inbound_nodes = inbound_nodes
# Node(s) to which this Node passes values
self.outbound_nodes = []
# For each inbound Node here, add this Node as an outbound to that Node.
for n in self.inbound_nodes:
n.outbound_nodes.append(self)
# A calculated value
self.value = None
|
<commit_before>class Node(object):
def __init__(self):
# Node(s) from which this Node receives values
self.inbound_nodes = inbound_nodes
# Node(s) to which this Node passes values
self.outbound_nodes = []
# For each inbound Node here, add this Node as an outbound to that Node.
for n in self.inbound_nodes:
n.outbound_nodes.append(self)
<commit_msg>Add value property to class Node
value is the calculated output of a Node.<commit_after>
|
class Node(object):
def __init__(self):
# Node(s) from which this Node receives values
self.inbound_nodes = inbound_nodes
# Node(s) to which this Node passes values
self.outbound_nodes = []
# For each inbound Node here, add this Node as an outbound to that Node.
for n in self.inbound_nodes:
n.outbound_nodes.append(self)
# A calculated value
self.value = None
|
class Node(object):
def __init__(self):
# Node(s) from which this Node receives values
self.inbound_nodes = inbound_nodes
# Node(s) to which this Node passes values
self.outbound_nodes = []
# For each inbound Node here, add this Node as an outbound to that Node.
for n in self.inbound_nodes:
n.outbound_nodes.append(self)
Add value property to class Node
value is the calculated output of a Node.class Node(object):
def __init__(self):
# Node(s) from which this Node receives values
self.inbound_nodes = inbound_nodes
# Node(s) to which this Node passes values
self.outbound_nodes = []
# For each inbound Node here, add this Node as an outbound to that Node.
for n in self.inbound_nodes:
n.outbound_nodes.append(self)
# A calculated value
self.value = None
|
<commit_before>class Node(object):
def __init__(self):
# Node(s) from which this Node receives values
self.inbound_nodes = inbound_nodes
# Node(s) to which this Node passes values
self.outbound_nodes = []
# For each inbound Node here, add this Node as an outbound to that Node.
for n in self.inbound_nodes:
n.outbound_nodes.append(self)
<commit_msg>Add value property to class Node
value is the calculated output of a Node.<commit_after>class Node(object):
def __init__(self):
# Node(s) from which this Node receives values
self.inbound_nodes = inbound_nodes
# Node(s) to which this Node passes values
self.outbound_nodes = []
# For each inbound Node here, add this Node as an outbound to that Node.
for n in self.inbound_nodes:
n.outbound_nodes.append(self)
# A calculated value
self.value = None
|
b0c93651c6d0d48394041ea61dea3774f6e017af
|
ircnotifier/redis2irc.py
|
ircnotifier/redis2irc.py
|
#!/usr/bin/env python
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME')])
message = json.loads(future.value)
channels = message['channels']
message = message['message']
# FIXME: Actually join channel if they aren't joined already
# FIXME: Actually send message, yo!
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
self.joined_channels = set()
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME', 'ircnotifier')])
message = json.loads(future.value)
channels = set(message['channels'])
message = message['message']
to_join = channels.difference(self.joined_channels)
for chan in to_join:
self.join(chan)
for chan in channels:
self.privmsg(chan, message)
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
|
Join channels where the bot isn't already on
|
Join channels where the bot isn't already on
|
Python
|
apache-2.0
|
wikimedia/operations-software-ircyall,yuvipanda/ircnotifier
|
#!/usr/bin/env python
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME')])
message = json.loads(future.value)
channels = message['channels']
message = message['message']
# FIXME: Actually join channel if they aren't joined already
# FIXME: Actually send message, yo!
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
if __name__ == '__main__':
main()
Join channels where the bot isn't already on
|
#!/usr/bin/env python
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
self.joined_channels = set()
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME', 'ircnotifier')])
message = json.loads(future.value)
channels = set(message['channels'])
message = message['message']
to_join = channels.difference(self.joined_channels)
for chan in to_join:
self.join(chan)
for chan in channels:
self.privmsg(chan, message)
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
|
<commit_before>#!/usr/bin/env python
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME')])
message = json.loads(future.value)
channels = message['channels']
message = message['message']
# FIXME: Actually join channel if they aren't joined already
# FIXME: Actually send message, yo!
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
if __name__ == '__main__':
main()
<commit_msg>Join channels where the bot isn't already on<commit_after>
|
#!/usr/bin/env python
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
self.joined_channels = set()
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME', 'ircnotifier')])
message = json.loads(future.value)
channels = set(message['channels'])
message = message['message']
to_join = channels.difference(self.joined_channels)
for chan in to_join:
self.join(chan)
for chan in channels:
self.privmsg(chan, message)
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
|
#!/usr/bin/env python
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME')])
message = json.loads(future.value)
channels = message['channels']
message = message['message']
# FIXME: Actually join channel if they aren't joined already
# FIXME: Actually send message, yo!
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
if __name__ == '__main__':
main()
Join channels where the bot isn't already on#!/usr/bin/env python
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
self.joined_channels = set()
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME', 'ircnotifier')])
message = json.loads(future.value)
channels = set(message['channels'])
message = message['message']
to_join = channels.difference(self.joined_channels)
for chan in to_join:
self.join(chan)
for chan in channels:
self.privmsg(chan, message)
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
|
<commit_before>#!/usr/bin/env python
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME')])
message = json.loads(future.value)
channels = message['channels']
message = message['message']
# FIXME: Actually join channel if they aren't joined already
# FIXME: Actually send message, yo!
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
if __name__ == '__main__':
main()
<commit_msg>Join channels where the bot isn't already on<commit_after>#!/usr/bin/env python
import asyncio
import asyncio_redis
import asyncio_redis.encoders
import json
import irc3
import traceback
__version__ = '3.0alpha'
class Redis2Irc(irc3.IrcBot):
def __init__(self, conf, **kwargs):
"""
:type conf: dict
"""
super(Redis2Irc, self).__init__(**kwargs)
self._conf = conf
self.joined_channels = set()
@property
def conf(self):
return self._conf
@asyncio.coroutine
def start(self):
while True:
try:
yield from self.process_message()
except Exception:
self.log.critical(traceback.format_exc())
self.log.info("...restarting Redis listener in a few seconds.")
yield from asyncio.sleep(5)
@asyncio.coroutine
def process_message(self):
# Create connection
connection = yield from asyncio_redis.Connection.create(
host=self.conf.get('REDIS_HOST', 'localhost'),
port=6379,
)
while True:
try:
future = yield from connection.blpop([self.conf.get('REDIS_QUEUE_NAME', 'ircnotifier')])
message = json.loads(future.value)
channels = set(message['channels'])
message = message['message']
to_join = channels.difference(self.joined_channels)
for chan in to_join:
self.join(chan)
for chan in channels:
self.privmsg(chan, message)
except:
self.log.critical(traceback.format_exc())
yield from asyncio.sleep(1)
|
c170e8a73bea60bba9fdf9a004f922dfeb6c5715
|
Tools/compiler/doc/astdocgen.py
|
Tools/compiler/doc/astdocgen.py
|
# Lame substitute for a fine script to generate the table from ast.txt
from compiler import astgen
AST_DEF = '../compiler/ast.txt'
def sort(l):
l = l[:]
l.sort(lambda a, b: cmp(a.name, b.name))
return l
def main():
nodes = astgen.parse_spec(AST_DEF)
print "\\begin{longtableiii}{lll}{class}{Node type}{Attribute}{Value}"
print
for node in sort(nodes):
if node.argnames:
print "\\lineiii{%s}{%s}{}" % (node.name, node.argnames[0])
else:
print "\\lineiii{%s}{}{}" % node.name
for arg in node.argnames[1:]:
print "\\lineiii{}{%s}{}" % arg
print "\\hline", "\n"
print "\\end{longtableiii}"
if __name__ == "__main__":
main()
|
# Lame substitute for a fine script to generate the table from ast.txt
from compiler import astgen
AST_DEF = '../compiler/ast.txt'
def sort(l):
l = l[:]
l.sort(lambda a, b: cmp(a.name, b.name))
return l
def main():
nodes = astgen.parse_spec(AST_DEF)
print "\\begin{longtableiii}{lll}{class}{Node type}{Attribute}{Value}"
print
for node in sort(nodes):
if node.argnames:
print "\\lineiii{%s}{%s}{}" % (node.name, node.argnames[0])
else:
print "\\lineiii{%s}{}{}" % node.name
for arg in node.argnames[1:]:
print "\\lineiii{}{\\member{%s}}{}" % arg
print "\\hline", "\n"
print "\\end{longtableiii}"
if __name__ == "__main__":
main()
|
Change the generated markup so that attribute names are properly marked.
|
Change the generated markup so that attribute names are properly marked.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
# Lame substitute for a fine script to generate the table from ast.txt
from compiler import astgen
AST_DEF = '../compiler/ast.txt'
def sort(l):
l = l[:]
l.sort(lambda a, b: cmp(a.name, b.name))
return l
def main():
nodes = astgen.parse_spec(AST_DEF)
print "\\begin{longtableiii}{lll}{class}{Node type}{Attribute}{Value}"
print
for node in sort(nodes):
if node.argnames:
print "\\lineiii{%s}{%s}{}" % (node.name, node.argnames[0])
else:
print "\\lineiii{%s}{}{}" % node.name
for arg in node.argnames[1:]:
print "\\lineiii{}{%s}{}" % arg
print "\\hline", "\n"
print "\\end{longtableiii}"
if __name__ == "__main__":
main()
Change the generated markup so that attribute names are properly marked.
|
# Lame substitute for a fine script to generate the table from ast.txt
from compiler import astgen
AST_DEF = '../compiler/ast.txt'
def sort(l):
l = l[:]
l.sort(lambda a, b: cmp(a.name, b.name))
return l
def main():
nodes = astgen.parse_spec(AST_DEF)
print "\\begin{longtableiii}{lll}{class}{Node type}{Attribute}{Value}"
print
for node in sort(nodes):
if node.argnames:
print "\\lineiii{%s}{%s}{}" % (node.name, node.argnames[0])
else:
print "\\lineiii{%s}{}{}" % node.name
for arg in node.argnames[1:]:
print "\\lineiii{}{\\member{%s}}{}" % arg
print "\\hline", "\n"
print "\\end{longtableiii}"
if __name__ == "__main__":
main()
|
<commit_before># Lame substitute for a fine script to generate the table from ast.txt
from compiler import astgen
AST_DEF = '../compiler/ast.txt'
def sort(l):
l = l[:]
l.sort(lambda a, b: cmp(a.name, b.name))
return l
def main():
nodes = astgen.parse_spec(AST_DEF)
print "\\begin{longtableiii}{lll}{class}{Node type}{Attribute}{Value}"
print
for node in sort(nodes):
if node.argnames:
print "\\lineiii{%s}{%s}{}" % (node.name, node.argnames[0])
else:
print "\\lineiii{%s}{}{}" % node.name
for arg in node.argnames[1:]:
print "\\lineiii{}{%s}{}" % arg
print "\\hline", "\n"
print "\\end{longtableiii}"
if __name__ == "__main__":
main()
<commit_msg>Change the generated markup so that attribute names are properly marked.<commit_after>
|
# Lame substitute for a fine script to generate the table from ast.txt
from compiler import astgen
AST_DEF = '../compiler/ast.txt'
def sort(l):
l = l[:]
l.sort(lambda a, b: cmp(a.name, b.name))
return l
def main():
nodes = astgen.parse_spec(AST_DEF)
print "\\begin{longtableiii}{lll}{class}{Node type}{Attribute}{Value}"
print
for node in sort(nodes):
if node.argnames:
print "\\lineiii{%s}{%s}{}" % (node.name, node.argnames[0])
else:
print "\\lineiii{%s}{}{}" % node.name
for arg in node.argnames[1:]:
print "\\lineiii{}{\\member{%s}}{}" % arg
print "\\hline", "\n"
print "\\end{longtableiii}"
if __name__ == "__main__":
main()
|
# Lame substitute for a fine script to generate the table from ast.txt
from compiler import astgen
AST_DEF = '../compiler/ast.txt'
def sort(l):
l = l[:]
l.sort(lambda a, b: cmp(a.name, b.name))
return l
def main():
nodes = astgen.parse_spec(AST_DEF)
print "\\begin{longtableiii}{lll}{class}{Node type}{Attribute}{Value}"
print
for node in sort(nodes):
if node.argnames:
print "\\lineiii{%s}{%s}{}" % (node.name, node.argnames[0])
else:
print "\\lineiii{%s}{}{}" % node.name
for arg in node.argnames[1:]:
print "\\lineiii{}{%s}{}" % arg
print "\\hline", "\n"
print "\\end{longtableiii}"
if __name__ == "__main__":
main()
Change the generated markup so that attribute names are properly marked.# Lame substitute for a fine script to generate the table from ast.txt
from compiler import astgen
AST_DEF = '../compiler/ast.txt'
def sort(l):
l = l[:]
l.sort(lambda a, b: cmp(a.name, b.name))
return l
def main():
nodes = astgen.parse_spec(AST_DEF)
print "\\begin{longtableiii}{lll}{class}{Node type}{Attribute}{Value}"
print
for node in sort(nodes):
if node.argnames:
print "\\lineiii{%s}{%s}{}" % (node.name, node.argnames[0])
else:
print "\\lineiii{%s}{}{}" % node.name
for arg in node.argnames[1:]:
print "\\lineiii{}{\\member{%s}}{}" % arg
print "\\hline", "\n"
print "\\end{longtableiii}"
if __name__ == "__main__":
main()
|
<commit_before># Lame substitute for a fine script to generate the table from ast.txt
from compiler import astgen
AST_DEF = '../compiler/ast.txt'
def sort(l):
l = l[:]
l.sort(lambda a, b: cmp(a.name, b.name))
return l
def main():
nodes = astgen.parse_spec(AST_DEF)
print "\\begin{longtableiii}{lll}{class}{Node type}{Attribute}{Value}"
print
for node in sort(nodes):
if node.argnames:
print "\\lineiii{%s}{%s}{}" % (node.name, node.argnames[0])
else:
print "\\lineiii{%s}{}{}" % node.name
for arg in node.argnames[1:]:
print "\\lineiii{}{%s}{}" % arg
print "\\hline", "\n"
print "\\end{longtableiii}"
if __name__ == "__main__":
main()
<commit_msg>Change the generated markup so that attribute names are properly marked.<commit_after># Lame substitute for a fine script to generate the table from ast.txt
from compiler import astgen
AST_DEF = '../compiler/ast.txt'
def sort(l):
l = l[:]
l.sort(lambda a, b: cmp(a.name, b.name))
return l
def main():
nodes = astgen.parse_spec(AST_DEF)
print "\\begin{longtableiii}{lll}{class}{Node type}{Attribute}{Value}"
print
for node in sort(nodes):
if node.argnames:
print "\\lineiii{%s}{%s}{}" % (node.name, node.argnames[0])
else:
print "\\lineiii{%s}{}{}" % node.name
for arg in node.argnames[1:]:
print "\\lineiii{}{\\member{%s}}{}" % arg
print "\\hline", "\n"
print "\\end{longtableiii}"
if __name__ == "__main__":
main()
|
d47d6bd812a6f335bc369cccc0539585ff7aeff3
|
endless_pagination/loaders.py
|
endless_pagination/loaders.py
|
"""Django Endless Pagination object loaders."""
from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def load_object(path):
"""Return the Python object represented by dotted *path*."""
i = path.rfind('.')
module_name, object_name = path[:i], path[i + 1:]
# Load module.
try:
module = import_module(module_name)
except ImportError:
raise ImproperlyConfigured('Module %r not found' % module_name)
except ValueError:
raise ImproperlyConfigured('Invalid module %r' % module_name)
# Load object.
try:
return getattr(module, object_name)
except AttributeError:
msg = 'Module %r does not define an object named %r'
raise ImproperlyConfigured(msg % (module_name, object_name))
|
"""Django Endless Pagination object loaders."""
from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from importlib import import_module
def load_object(path):
"""Return the Python object represented by dotted *path*."""
i = path.rfind('.')
module_name, object_name = path[:i], path[i + 1:]
# Load module.
try:
module = import_module(module_name)
except ImportError:
raise ImproperlyConfigured('Module %r not found' % module_name)
except ValueError:
raise ImproperlyConfigured('Invalid module %r' % module_name)
# Load object.
try:
return getattr(module, object_name)
except AttributeError:
msg = 'Module %r does not define an object named %r'
raise ImproperlyConfigured(msg % (module_name, object_name))
|
Use importlib instead of django.utils.importlib
|
Use importlib instead of django.utils.importlib
django.utils.importlib is deprecated, and removed in Django 1.9
|
Python
|
mit
|
catalpainternational/django-endless-pagination,catalpainternational/django-endless-pagination,catalpainternational/django-endless-pagination,catalpainternational/django-endless-pagination
|
"""Django Endless Pagination object loaders."""
from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def load_object(path):
"""Return the Python object represented by dotted *path*."""
i = path.rfind('.')
module_name, object_name = path[:i], path[i + 1:]
# Load module.
try:
module = import_module(module_name)
except ImportError:
raise ImproperlyConfigured('Module %r not found' % module_name)
except ValueError:
raise ImproperlyConfigured('Invalid module %r' % module_name)
# Load object.
try:
return getattr(module, object_name)
except AttributeError:
msg = 'Module %r does not define an object named %r'
raise ImproperlyConfigured(msg % (module_name, object_name))
Use importlib instead of django.utils.importlib
django.utils.importlib is deprecated, and removed in Django 1.9
|
"""Django Endless Pagination object loaders."""
from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from importlib import import_module
def load_object(path):
"""Return the Python object represented by dotted *path*."""
i = path.rfind('.')
module_name, object_name = path[:i], path[i + 1:]
# Load module.
try:
module = import_module(module_name)
except ImportError:
raise ImproperlyConfigured('Module %r not found' % module_name)
except ValueError:
raise ImproperlyConfigured('Invalid module %r' % module_name)
# Load object.
try:
return getattr(module, object_name)
except AttributeError:
msg = 'Module %r does not define an object named %r'
raise ImproperlyConfigured(msg % (module_name, object_name))
|
<commit_before>"""Django Endless Pagination object loaders."""
from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def load_object(path):
"""Return the Python object represented by dotted *path*."""
i = path.rfind('.')
module_name, object_name = path[:i], path[i + 1:]
# Load module.
try:
module = import_module(module_name)
except ImportError:
raise ImproperlyConfigured('Module %r not found' % module_name)
except ValueError:
raise ImproperlyConfigured('Invalid module %r' % module_name)
# Load object.
try:
return getattr(module, object_name)
except AttributeError:
msg = 'Module %r does not define an object named %r'
raise ImproperlyConfigured(msg % (module_name, object_name))
<commit_msg>Use importlib instead of django.utils.importlib
django.utils.importlib is deprecated, and removed in Django 1.9<commit_after>
|
"""Django Endless Pagination object loaders."""
from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from importlib import import_module
def load_object(path):
"""Return the Python object represented by dotted *path*."""
i = path.rfind('.')
module_name, object_name = path[:i], path[i + 1:]
# Load module.
try:
module = import_module(module_name)
except ImportError:
raise ImproperlyConfigured('Module %r not found' % module_name)
except ValueError:
raise ImproperlyConfigured('Invalid module %r' % module_name)
# Load object.
try:
return getattr(module, object_name)
except AttributeError:
msg = 'Module %r does not define an object named %r'
raise ImproperlyConfigured(msg % (module_name, object_name))
|
"""Django Endless Pagination object loaders."""
from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def load_object(path):
"""Return the Python object represented by dotted *path*."""
i = path.rfind('.')
module_name, object_name = path[:i], path[i + 1:]
# Load module.
try:
module = import_module(module_name)
except ImportError:
raise ImproperlyConfigured('Module %r not found' % module_name)
except ValueError:
raise ImproperlyConfigured('Invalid module %r' % module_name)
# Load object.
try:
return getattr(module, object_name)
except AttributeError:
msg = 'Module %r does not define an object named %r'
raise ImproperlyConfigured(msg % (module_name, object_name))
Use importlib instead of django.utils.importlib
django.utils.importlib is deprecated, and removed in Django 1.9"""Django Endless Pagination object loaders."""
from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from importlib import import_module
def load_object(path):
"""Return the Python object represented by dotted *path*."""
i = path.rfind('.')
module_name, object_name = path[:i], path[i + 1:]
# Load module.
try:
module = import_module(module_name)
except ImportError:
raise ImproperlyConfigured('Module %r not found' % module_name)
except ValueError:
raise ImproperlyConfigured('Invalid module %r' % module_name)
# Load object.
try:
return getattr(module, object_name)
except AttributeError:
msg = 'Module %r does not define an object named %r'
raise ImproperlyConfigured(msg % (module_name, object_name))
|
<commit_before>"""Django Endless Pagination object loaders."""
from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def load_object(path):
"""Return the Python object represented by dotted *path*."""
i = path.rfind('.')
module_name, object_name = path[:i], path[i + 1:]
# Load module.
try:
module = import_module(module_name)
except ImportError:
raise ImproperlyConfigured('Module %r not found' % module_name)
except ValueError:
raise ImproperlyConfigured('Invalid module %r' % module_name)
# Load object.
try:
return getattr(module, object_name)
except AttributeError:
msg = 'Module %r does not define an object named %r'
raise ImproperlyConfigured(msg % (module_name, object_name))
<commit_msg>Use importlib instead of django.utils.importlib
django.utils.importlib is deprecated, and removed in Django 1.9<commit_after>"""Django Endless Pagination object loaders."""
from __future__ import unicode_literals
from django.core.exceptions import ImproperlyConfigured
from importlib import import_module
def load_object(path):
"""Return the Python object represented by dotted *path*."""
i = path.rfind('.')
module_name, object_name = path[:i], path[i + 1:]
# Load module.
try:
module = import_module(module_name)
except ImportError:
raise ImproperlyConfigured('Module %r not found' % module_name)
except ValueError:
raise ImproperlyConfigured('Invalid module %r' % module_name)
# Load object.
try:
return getattr(module, object_name)
except AttributeError:
msg = 'Module %r does not define an object named %r'
raise ImproperlyConfigured(msg % (module_name, object_name))
|
35aca2dc94b129995db292352d7f5e79f05bde0c
|
Lib/test/crashers/compiler_recursion.py
|
Lib/test/crashers/compiler_recursion.py
|
"""
The compiler (>= 2.5) recurses happily.
"""
compile('()'*9**5, '?', 'exec')
|
"""
The compiler (>= 2.5) recurses happily until it blows the stack.
Recorded on the tracker as http://bugs.python.org/issue11383
"""
# The variant below blows up in compiler_call, but there are assorted
# other variations that blow up in other functions
# e.g. '1*'*10**5+'1' will die in compiler_visit_expr
# The exact limit to destroy the stack will vary by platform
# but 100k should do the trick most places
compile('()'*10**5, '?', 'exec')
|
Update compiler recursion crasher to more reliably blow the stack and add a tracker issue for it
|
Update compiler recursion crasher to more reliably blow the stack and add a tracker issue for it
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
"""
The compiler (>= 2.5) recurses happily.
"""
compile('()'*9**5, '?', 'exec')
Update compiler recursion crasher to more reliably blow the stack and add a tracker issue for it
|
"""
The compiler (>= 2.5) recurses happily until it blows the stack.
Recorded on the tracker as http://bugs.python.org/issue11383
"""
# The variant below blows up in compiler_call, but there are assorted
# other variations that blow up in other functions
# e.g. '1*'*10**5+'1' will die in compiler_visit_expr
# The exact limit to destroy the stack will vary by platform
# but 100k should do the trick most places
compile('()'*10**5, '?', 'exec')
|
<commit_before>"""
The compiler (>= 2.5) recurses happily.
"""
compile('()'*9**5, '?', 'exec')
<commit_msg>Update compiler recursion crasher to more reliably blow the stack and add a tracker issue for it<commit_after>
|
"""
The compiler (>= 2.5) recurses happily until it blows the stack.
Recorded on the tracker as http://bugs.python.org/issue11383
"""
# The variant below blows up in compiler_call, but there are assorted
# other variations that blow up in other functions
# e.g. '1*'*10**5+'1' will die in compiler_visit_expr
# The exact limit to destroy the stack will vary by platform
# but 100k should do the trick most places
compile('()'*10**5, '?', 'exec')
|
"""
The compiler (>= 2.5) recurses happily.
"""
compile('()'*9**5, '?', 'exec')
Update compiler recursion crasher to more reliably blow the stack and add a tracker issue for it"""
The compiler (>= 2.5) recurses happily until it blows the stack.
Recorded on the tracker as http://bugs.python.org/issue11383
"""
# The variant below blows up in compiler_call, but there are assorted
# other variations that blow up in other functions
# e.g. '1*'*10**5+'1' will die in compiler_visit_expr
# The exact limit to destroy the stack will vary by platform
# but 100k should do the trick most places
compile('()'*10**5, '?', 'exec')
|
<commit_before>"""
The compiler (>= 2.5) recurses happily.
"""
compile('()'*9**5, '?', 'exec')
<commit_msg>Update compiler recursion crasher to more reliably blow the stack and add a tracker issue for it<commit_after>"""
The compiler (>= 2.5) recurses happily until it blows the stack.
Recorded on the tracker as http://bugs.python.org/issue11383
"""
# The variant below blows up in compiler_call, but there are assorted
# other variations that blow up in other functions
# e.g. '1*'*10**5+'1' will die in compiler_visit_expr
# The exact limit to destroy the stack will vary by platform
# but 100k should do the trick most places
compile('()'*10**5, '?', 'exec')
|
70f568a97f87f039fe06d74e1cf46040e0b6b817
|
tests/test_etcd3.py
|
tests/test_etcd3.py
|
"""
test_etcd3
----------------------------------
Tests for `etcd3` module.
"""
import pytest
import etcd3
class TestEtcd3(object):
@classmethod
def setup_class(cls):
pass
def test_client_stub(self):
etcd = etcd3.client()
assert etcd is not None
def test_get_unknown_key(self):
etcd = etcd3.client()
with pytest.raises(etcd3.exceptions.KeyNotFoundError):
etcd.get('probably-invalid-key')
def test_get_key(self):
etcd = etcd3.client()
etcd.get('doot')
def test_put_key(self):
etcd = etcd3.client()
etcd.put('doot', 'this is a doot')
@classmethod
def teardown_class(cls):
pass
|
"""
test_etcd3
----------------------------------
Tests for `etcd3` module.
"""
import os
import pytest
import etcd3
class TestEtcd3(object):
@classmethod
def setup_class(cls):
pass
def test_client_stub(self):
etcd = etcd3.client()
assert etcd is not None
def test_get_unknown_key(self):
etcd = etcd3.client()
with pytest.raises(etcd3.exceptions.KeyNotFoundError):
etcd.get('probably-invalid-key')
def test_get_key(self):
os.system("etcdctl put /doot/a_key some_value")
etcd = etcd3.client()
etcd.get('/doot/a_key')
def test_put_key(self):
etcd = etcd3.client()
etcd.put('/doot', 'this is a doot')
@classmethod
def teardown_class(cls):
os.system("etcdctl -w json del --prefix /doot")
|
Clean out testing keys with etcdctl
|
Clean out testing keys with etcdctl
|
Python
|
apache-2.0
|
kragniz/python-etcd3
|
"""
test_etcd3
----------------------------------
Tests for `etcd3` module.
"""
import pytest
import etcd3
class TestEtcd3(object):
@classmethod
def setup_class(cls):
pass
def test_client_stub(self):
etcd = etcd3.client()
assert etcd is not None
def test_get_unknown_key(self):
etcd = etcd3.client()
with pytest.raises(etcd3.exceptions.KeyNotFoundError):
etcd.get('probably-invalid-key')
def test_get_key(self):
etcd = etcd3.client()
etcd.get('doot')
def test_put_key(self):
etcd = etcd3.client()
etcd.put('doot', 'this is a doot')
@classmethod
def teardown_class(cls):
pass
Clean out testing keys with etcdctl
|
"""
test_etcd3
----------------------------------
Tests for `etcd3` module.
"""
import os
import pytest
import etcd3
class TestEtcd3(object):
@classmethod
def setup_class(cls):
pass
def test_client_stub(self):
etcd = etcd3.client()
assert etcd is not None
def test_get_unknown_key(self):
etcd = etcd3.client()
with pytest.raises(etcd3.exceptions.KeyNotFoundError):
etcd.get('probably-invalid-key')
def test_get_key(self):
os.system("etcdctl put /doot/a_key some_value")
etcd = etcd3.client()
etcd.get('/doot/a_key')
def test_put_key(self):
etcd = etcd3.client()
etcd.put('/doot', 'this is a doot')
@classmethod
def teardown_class(cls):
os.system("etcdctl -w json del --prefix /doot")
|
<commit_before>"""
test_etcd3
----------------------------------
Tests for `etcd3` module.
"""
import pytest
import etcd3
class TestEtcd3(object):
@classmethod
def setup_class(cls):
pass
def test_client_stub(self):
etcd = etcd3.client()
assert etcd is not None
def test_get_unknown_key(self):
etcd = etcd3.client()
with pytest.raises(etcd3.exceptions.KeyNotFoundError):
etcd.get('probably-invalid-key')
def test_get_key(self):
etcd = etcd3.client()
etcd.get('doot')
def test_put_key(self):
etcd = etcd3.client()
etcd.put('doot', 'this is a doot')
@classmethod
def teardown_class(cls):
pass
<commit_msg>Clean out testing keys with etcdctl<commit_after>
|
"""
test_etcd3
----------------------------------
Tests for `etcd3` module.
"""
import os
import pytest
import etcd3
class TestEtcd3(object):
@classmethod
def setup_class(cls):
pass
def test_client_stub(self):
etcd = etcd3.client()
assert etcd is not None
def test_get_unknown_key(self):
etcd = etcd3.client()
with pytest.raises(etcd3.exceptions.KeyNotFoundError):
etcd.get('probably-invalid-key')
def test_get_key(self):
os.system("etcdctl put /doot/a_key some_value")
etcd = etcd3.client()
etcd.get('/doot/a_key')
def test_put_key(self):
etcd = etcd3.client()
etcd.put('/doot', 'this is a doot')
@classmethod
def teardown_class(cls):
os.system("etcdctl -w json del --prefix /doot")
|
"""
test_etcd3
----------------------------------
Tests for `etcd3` module.
"""
import pytest
import etcd3
class TestEtcd3(object):
@classmethod
def setup_class(cls):
pass
def test_client_stub(self):
etcd = etcd3.client()
assert etcd is not None
def test_get_unknown_key(self):
etcd = etcd3.client()
with pytest.raises(etcd3.exceptions.KeyNotFoundError):
etcd.get('probably-invalid-key')
def test_get_key(self):
etcd = etcd3.client()
etcd.get('doot')
def test_put_key(self):
etcd = etcd3.client()
etcd.put('doot', 'this is a doot')
@classmethod
def teardown_class(cls):
pass
Clean out testing keys with etcdctl"""
test_etcd3
----------------------------------
Tests for `etcd3` module.
"""
import os
import pytest
import etcd3
class TestEtcd3(object):
@classmethod
def setup_class(cls):
pass
def test_client_stub(self):
etcd = etcd3.client()
assert etcd is not None
def test_get_unknown_key(self):
etcd = etcd3.client()
with pytest.raises(etcd3.exceptions.KeyNotFoundError):
etcd.get('probably-invalid-key')
def test_get_key(self):
os.system("etcdctl put /doot/a_key some_value")
etcd = etcd3.client()
etcd.get('/doot/a_key')
def test_put_key(self):
etcd = etcd3.client()
etcd.put('/doot', 'this is a doot')
@classmethod
def teardown_class(cls):
os.system("etcdctl -w json del --prefix /doot")
|
<commit_before>"""
test_etcd3
----------------------------------
Tests for `etcd3` module.
"""
import pytest
import etcd3
class TestEtcd3(object):
@classmethod
def setup_class(cls):
pass
def test_client_stub(self):
etcd = etcd3.client()
assert etcd is not None
def test_get_unknown_key(self):
etcd = etcd3.client()
with pytest.raises(etcd3.exceptions.KeyNotFoundError):
etcd.get('probably-invalid-key')
def test_get_key(self):
etcd = etcd3.client()
etcd.get('doot')
def test_put_key(self):
etcd = etcd3.client()
etcd.put('doot', 'this is a doot')
@classmethod
def teardown_class(cls):
pass
<commit_msg>Clean out testing keys with etcdctl<commit_after>"""
test_etcd3
----------------------------------
Tests for `etcd3` module.
"""
import os
import pytest
import etcd3
class TestEtcd3(object):
@classmethod
def setup_class(cls):
pass
def test_client_stub(self):
etcd = etcd3.client()
assert etcd is not None
def test_get_unknown_key(self):
etcd = etcd3.client()
with pytest.raises(etcd3.exceptions.KeyNotFoundError):
etcd.get('probably-invalid-key')
def test_get_key(self):
os.system("etcdctl put /doot/a_key some_value")
etcd = etcd3.client()
etcd.get('/doot/a_key')
def test_put_key(self):
etcd = etcd3.client()
etcd.put('/doot', 'this is a doot')
@classmethod
def teardown_class(cls):
os.system("etcdctl -w json del --prefix /doot")
|
ca558816e26c2899a597698fc773c762d25956c4
|
tk/material/apps.py
|
tk/material/apps.py
|
from django.apps import AppConfig
from django.db.models.signals import post_save
from watson import search
from localized_fields.fields import LocalizedField
class MaterialSearchAdapter(search.SearchAdapter):
"""
Dumps all translated titles and descriptions into the search index.
The translated fields are stored as metadata.
"""
@property
def store(self):
return ['title', 'brief']
def _join_translations(self, field: LocalizedField) -> str:
return ' '.join([v for v in field.values() if v is not None])
def get_title(self, obj):
return self._join_translations(getattr(obj, 'title'))
def get_description(self, obj):
return self._join_translations(getattr(obj, 'brief'))
class MaterialConfig(AppConfig):
name = 'tk.material'
def ready(self):
for mn in ['Activity', 'Reading', 'Video', 'Link']:
m = self.get_model(mn)
search.register(m.objects.approved(), MaterialSearchAdapter)
|
from django.apps import AppConfig
from django.db.models.signals import post_save
from django.utils import translation
from django.conf import settings
from watson import search
from localized_fields.fields import LocalizedField
class MaterialSearchAdapter(search.SearchAdapter):
"""
Dumps all translated titles and descriptions into the search index.
The translated fields are stored as metadata.
"""
@property
def store(self):
return ['title', 'urls', 'brief']
def _join_translations(self, field: LocalizedField) -> str:
return ' '.join([v for v in field.values() if v is not None])
def get_title(self, obj):
return self._join_translations(getattr(obj, 'title'))
def get_description(self, obj):
return self._join_translations(getattr(obj, 'brief'))
def urls(self, obj):
urls = {}
for lang, _ in settings.LANGUAGES:
translation.activate(lang)
urls[lang] = obj.get_absolute_url()
return urls
def get_url(self, obj):
# URLs are localized, cannot store in a text field
return ''
class MaterialConfig(AppConfig):
name = 'tk.material'
def ready(self):
for mn in ['Activity', 'Reading', 'Video', 'Link']:
m = self.get_model(mn)
search.register(m.objects.approved(), MaterialSearchAdapter)
|
Index localised URLs for search
|
Index localised URLs for search
|
Python
|
agpl-3.0
|
GISAElkartea/tresna-kutxa,GISAElkartea/tresna-kutxa,GISAElkartea/tresna-kutxa,GISAElkartea/tresna-kutxa
|
from django.apps import AppConfig
from django.db.models.signals import post_save
from watson import search
from localized_fields.fields import LocalizedField
class MaterialSearchAdapter(search.SearchAdapter):
"""
Dumps all translated titles and descriptions into the search index.
The translated fields are stored as metadata.
"""
@property
def store(self):
return ['title', 'brief']
def _join_translations(self, field: LocalizedField) -> str:
return ' '.join([v for v in field.values() if v is not None])
def get_title(self, obj):
return self._join_translations(getattr(obj, 'title'))
def get_description(self, obj):
return self._join_translations(getattr(obj, 'brief'))
class MaterialConfig(AppConfig):
name = 'tk.material'
def ready(self):
for mn in ['Activity', 'Reading', 'Video', 'Link']:
m = self.get_model(mn)
search.register(m.objects.approved(), MaterialSearchAdapter)
Index localised URLs for search
|
from django.apps import AppConfig
from django.db.models.signals import post_save
from django.utils import translation
from django.conf import settings
from watson import search
from localized_fields.fields import LocalizedField
class MaterialSearchAdapter(search.SearchAdapter):
"""
Dumps all translated titles and descriptions into the search index.
The translated fields are stored as metadata.
"""
@property
def store(self):
return ['title', 'urls', 'brief']
def _join_translations(self, field: LocalizedField) -> str:
return ' '.join([v for v in field.values() if v is not None])
def get_title(self, obj):
return self._join_translations(getattr(obj, 'title'))
def get_description(self, obj):
return self._join_translations(getattr(obj, 'brief'))
def urls(self, obj):
urls = {}
for lang, _ in settings.LANGUAGES:
translation.activate(lang)
urls[lang] = obj.get_absolute_url()
return urls
def get_url(self, obj):
# URLs are localized, cannot store in a text field
return ''
class MaterialConfig(AppConfig):
name = 'tk.material'
def ready(self):
for mn in ['Activity', 'Reading', 'Video', 'Link']:
m = self.get_model(mn)
search.register(m.objects.approved(), MaterialSearchAdapter)
|
<commit_before>from django.apps import AppConfig
from django.db.models.signals import post_save
from watson import search
from localized_fields.fields import LocalizedField
class MaterialSearchAdapter(search.SearchAdapter):
"""
Dumps all translated titles and descriptions into the search index.
The translated fields are stored as metadata.
"""
@property
def store(self):
return ['title', 'brief']
def _join_translations(self, field: LocalizedField) -> str:
return ' '.join([v for v in field.values() if v is not None])
def get_title(self, obj):
return self._join_translations(getattr(obj, 'title'))
def get_description(self, obj):
return self._join_translations(getattr(obj, 'brief'))
class MaterialConfig(AppConfig):
name = 'tk.material'
def ready(self):
for mn in ['Activity', 'Reading', 'Video', 'Link']:
m = self.get_model(mn)
search.register(m.objects.approved(), MaterialSearchAdapter)
<commit_msg>Index localised URLs for search<commit_after>
|
from django.apps import AppConfig
from django.db.models.signals import post_save
from django.utils import translation
from django.conf import settings
from watson import search
from localized_fields.fields import LocalizedField
class MaterialSearchAdapter(search.SearchAdapter):
"""
Dumps all translated titles and descriptions into the search index.
The translated fields are stored as metadata.
"""
@property
def store(self):
return ['title', 'urls', 'brief']
def _join_translations(self, field: LocalizedField) -> str:
return ' '.join([v for v in field.values() if v is not None])
def get_title(self, obj):
return self._join_translations(getattr(obj, 'title'))
def get_description(self, obj):
return self._join_translations(getattr(obj, 'brief'))
def urls(self, obj):
urls = {}
for lang, _ in settings.LANGUAGES:
translation.activate(lang)
urls[lang] = obj.get_absolute_url()
return urls
def get_url(self, obj):
# URLs are localized, cannot store in a text field
return ''
class MaterialConfig(AppConfig):
name = 'tk.material'
def ready(self):
for mn in ['Activity', 'Reading', 'Video', 'Link']:
m = self.get_model(mn)
search.register(m.objects.approved(), MaterialSearchAdapter)
|
from django.apps import AppConfig
from django.db.models.signals import post_save
from watson import search
from localized_fields.fields import LocalizedField
class MaterialSearchAdapter(search.SearchAdapter):
"""
Dumps all translated titles and descriptions into the search index.
The translated fields are stored as metadata.
"""
@property
def store(self):
return ['title', 'brief']
def _join_translations(self, field: LocalizedField) -> str:
return ' '.join([v for v in field.values() if v is not None])
def get_title(self, obj):
return self._join_translations(getattr(obj, 'title'))
def get_description(self, obj):
return self._join_translations(getattr(obj, 'brief'))
class MaterialConfig(AppConfig):
name = 'tk.material'
def ready(self):
for mn in ['Activity', 'Reading', 'Video', 'Link']:
m = self.get_model(mn)
search.register(m.objects.approved(), MaterialSearchAdapter)
Index localised URLs for searchfrom django.apps import AppConfig
from django.db.models.signals import post_save
from django.utils import translation
from django.conf import settings
from watson import search
from localized_fields.fields import LocalizedField
class MaterialSearchAdapter(search.SearchAdapter):
"""
Dumps all translated titles and descriptions into the search index.
The translated fields are stored as metadata.
"""
@property
def store(self):
return ['title', 'urls', 'brief']
def _join_translations(self, field: LocalizedField) -> str:
return ' '.join([v for v in field.values() if v is not None])
def get_title(self, obj):
return self._join_translations(getattr(obj, 'title'))
def get_description(self, obj):
return self._join_translations(getattr(obj, 'brief'))
def urls(self, obj):
urls = {}
for lang, _ in settings.LANGUAGES:
translation.activate(lang)
urls[lang] = obj.get_absolute_url()
return urls
def get_url(self, obj):
# URLs are localized, cannot store in a text field
return ''
class MaterialConfig(AppConfig):
name = 'tk.material'
def ready(self):
for mn in ['Activity', 'Reading', 'Video', 'Link']:
m = self.get_model(mn)
search.register(m.objects.approved(), MaterialSearchAdapter)
|
<commit_before>from django.apps import AppConfig
from django.db.models.signals import post_save
from watson import search
from localized_fields.fields import LocalizedField
class MaterialSearchAdapter(search.SearchAdapter):
"""
Dumps all translated titles and descriptions into the search index.
The translated fields are stored as metadata.
"""
@property
def store(self):
return ['title', 'brief']
def _join_translations(self, field: LocalizedField) -> str:
return ' '.join([v for v in field.values() if v is not None])
def get_title(self, obj):
return self._join_translations(getattr(obj, 'title'))
def get_description(self, obj):
return self._join_translations(getattr(obj, 'brief'))
class MaterialConfig(AppConfig):
name = 'tk.material'
def ready(self):
for mn in ['Activity', 'Reading', 'Video', 'Link']:
m = self.get_model(mn)
search.register(m.objects.approved(), MaterialSearchAdapter)
<commit_msg>Index localised URLs for search<commit_after>from django.apps import AppConfig
from django.db.models.signals import post_save
from django.utils import translation
from django.conf import settings
from watson import search
from localized_fields.fields import LocalizedField
class MaterialSearchAdapter(search.SearchAdapter):
"""
Dumps all translated titles and descriptions into the search index.
The translated fields are stored as metadata.
"""
@property
def store(self):
return ['title', 'urls', 'brief']
def _join_translations(self, field: LocalizedField) -> str:
return ' '.join([v for v in field.values() if v is not None])
def get_title(self, obj):
return self._join_translations(getattr(obj, 'title'))
def get_description(self, obj):
return self._join_translations(getattr(obj, 'brief'))
def urls(self, obj):
urls = {}
for lang, _ in settings.LANGUAGES:
translation.activate(lang)
urls[lang] = obj.get_absolute_url()
return urls
def get_url(self, obj):
# URLs are localized, cannot store in a text field
return ''
class MaterialConfig(AppConfig):
name = 'tk.material'
def ready(self):
for mn in ['Activity', 'Reading', 'Video', 'Link']:
m = self.get_model(mn)
search.register(m.objects.approved(), MaterialSearchAdapter)
|
84ee7f1c63b992a6e581ca2fcd33522ae19446ff
|
grako/__init__.py
|
grako/__init__.py
|
# -*- coding: utf-8 -*-
"""
Parse and translate an EBNF grammar into a Python parser for
the described language.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from grako import tool
genmodel = tool.genmodel
codegen = tool.codegen
def main():
tool.main()
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
"""
Parse and translate an EBNF grammar into a Python parser for
the described language.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from grako import tool
genmodel = tool.genmodel
gencode = tool.gencode
def main():
tool.main()
if __name__ == '__main__':
main()
|
Revert unwanted change in tool/script.
|
Revert unwanted change in tool/script.
|
Python
|
bsd-2-clause
|
vmuriart/grako,frnknglrt/grako
|
# -*- coding: utf-8 -*-
"""
Parse and translate an EBNF grammar into a Python parser for
the described language.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from grako import tool
genmodel = tool.genmodel
codegen = tool.codegen
def main():
tool.main()
if __name__ == '__main__':
main()
Revert unwanted change in tool/script.
|
# -*- coding: utf-8 -*-
"""
Parse and translate an EBNF grammar into a Python parser for
the described language.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from grako import tool
genmodel = tool.genmodel
gencode = tool.gencode
def main():
tool.main()
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
"""
Parse and translate an EBNF grammar into a Python parser for
the described language.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from grako import tool
genmodel = tool.genmodel
codegen = tool.codegen
def main():
tool.main()
if __name__ == '__main__':
main()
<commit_msg>Revert unwanted change in tool/script.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Parse and translate an EBNF grammar into a Python parser for
the described language.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from grako import tool
genmodel = tool.genmodel
gencode = tool.gencode
def main():
tool.main()
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
"""
Parse and translate an EBNF grammar into a Python parser for
the described language.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from grako import tool
genmodel = tool.genmodel
codegen = tool.codegen
def main():
tool.main()
if __name__ == '__main__':
main()
Revert unwanted change in tool/script.# -*- coding: utf-8 -*-
"""
Parse and translate an EBNF grammar into a Python parser for
the described language.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from grako import tool
genmodel = tool.genmodel
gencode = tool.gencode
def main():
tool.main()
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
"""
Parse and translate an EBNF grammar into a Python parser for
the described language.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from grako import tool
genmodel = tool.genmodel
codegen = tool.codegen
def main():
tool.main()
if __name__ == '__main__':
main()
<commit_msg>Revert unwanted change in tool/script.<commit_after># -*- coding: utf-8 -*-
"""
Parse and translate an EBNF grammar into a Python parser for
the described language.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from grako import tool
genmodel = tool.genmodel
gencode = tool.gencode
def main():
tool.main()
if __name__ == '__main__':
main()
|
6136fc2bd2d9d191df7a9e6afd3aa9e4f110d61e
|
numpy/core/tests/test_print.py
|
numpy/core/tests/test_print.py
|
import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
|
import numpy as np
from numpy.testing import *
def check_float_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(float(x)))
def test_float_types():
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
yield check_float_type, t
def check_complex_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(complex(x)))
assert_equal(str(tp(x*1j)), str(complex(x*1j)))
assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
def test_complex_types():
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
yield check_complex_type, t
if __name__ == "__main__":
run_module_suite()
|
Use parametric tests for format tests so that it is clearer which type is failing.
|
Use parametric tests for format tests so that it is clearer which type is failing.
|
Python
|
bsd-3-clause
|
solarjoe/numpy,NextThought/pypy-numpy,musically-ut/numpy,trankmichael/numpy,ViralLeadership/numpy,b-carter/numpy,argriffing/numpy,ogrisel/numpy,mhvk/numpy,mingwpy/numpy,b-carter/numpy,ewmoore/numpy,jakirkham/numpy,ahaldane/numpy,KaelChen/numpy,mhvk/numpy,utke1/numpy,ogrisel/numpy,skymanaditya1/numpy,rmcgibbo/numpy,embray/numpy,immerrr/numpy,ekalosak/numpy,skymanaditya1/numpy,ssanderson/numpy,pelson/numpy,abalkin/numpy,GaZ3ll3/numpy,rhythmsosad/numpy,kiwifb/numpy,numpy/numpy-refactor,nguyentu1602/numpy,maniteja123/numpy,BMJHayward/numpy,empeeu/numpy,has2k1/numpy,dwillmer/numpy,githubmlai/numpy,pyparallel/numpy,BMJHayward/numpy,seberg/numpy,Srisai85/numpy,AustereCuriosity/numpy,dwf/numpy,GrimDerp/numpy,numpy/numpy,charris/numpy,abalkin/numpy,Anwesh43/numpy,ViralLeadership/numpy,dato-code/numpy,rgommers/numpy,pizzathief/numpy,ChanderG/numpy,madphysicist/numpy,dwf/numpy,brandon-rhodes/numpy,gfyoung/numpy,groutr/numpy,numpy/numpy-refactor,CMartelLML/numpy,jonathanunderwood/numpy,rajathkumarmp/numpy,cowlicks/numpy,rhythmsosad/numpy,joferkington/numpy,joferkington/numpy,BabeNovelty/numpy,utke1/numpy,Yusa95/numpy,GrimDerp/numpy,astrofrog/numpy,charris/numpy,CMartelLML/numpy,GaZ3ll3/numpy,cjermain/numpy,ajdawson/numpy,MSeifert04/numpy,MaPePeR/numpy,has2k1/numpy,andsor/numpy,githubmlai/numpy,larsmans/numpy,matthew-brett/numpy,mindw/numpy,Dapid/numpy,nguyentu1602/numpy,gmcastil/numpy,abalkin/numpy,cjermain/numpy,rajathkumarmp/numpy,Anwesh43/numpy,mortada/numpy,sigma-random/numpy,numpy/numpy,chiffa/numpy,Dapid/numpy,mathdd/numpy,hainm/numpy,jschueller/numpy,ogrisel/numpy,brandon-rhodes/numpy,ewmoore/numpy,dwillmer/numpy,ekalosak/numpy,tacaswell/numpy,numpy/numpy-refactor,mwiebe/numpy,bertrand-l/numpy,jakirkham/numpy,endolith/numpy,embray/numpy,solarjoe/numpy,mattip/numpy,MaPePeR/numpy,mwiebe/numpy,AustereCuriosity/numpy,ESSS/numpy,matthew-brett/numpy,argriffing/numpy,nbeaver/numpy,dimasad/numpy,drasmuss/numpy,rudimeier/numpy,Eric89GXL/numpy,naritta/numpy,chatcannon/numpy,Dapid/
numpy,jakirkham/numpy,AustereCuriosity/numpy,ESSS/numpy,tdsmith/numpy,rmcgibbo/numpy,pizzathief/numpy,rherault-insa/numpy,embray/numpy,seberg/numpy,rudimeier/numpy,mathdd/numpy,SunghanKim/numpy,MaPePeR/numpy,ddasilva/numpy,naritta/numpy,GrimDerp/numpy,nbeaver/numpy,numpy/numpy,drasmuss/numpy,kiwifb/numpy,mwiebe/numpy,bertrand-l/numpy,ogrisel/numpy,ahaldane/numpy,mortada/numpy,numpy/numpy-refactor,andsor/numpy,astrofrog/numpy,skwbc/numpy,MaPePeR/numpy,githubmlai/numpy,andsor/numpy,empeeu/numpy,dwillmer/numpy,CMartelLML/numpy,Srisai85/numpy,dimasad/numpy,madphysicist/numpy,cjermain/numpy,shoyer/numpy,ewmoore/numpy,drasmuss/numpy,bringingheavendown/numpy,dato-code/numpy,NextThought/pypy-numpy,Anwesh43/numpy,bmorris3/numpy,andsor/numpy,felipebetancur/numpy,mindw/numpy,mhvk/numpy,GaZ3ll3/numpy,skwbc/numpy,BMJHayward/numpy,jschueller/numpy,musically-ut/numpy,charris/numpy,matthew-brett/numpy,pelson/numpy,joferkington/numpy,seberg/numpy,jankoslavic/numpy,WillieMaddox/numpy,grlee77/numpy,Linkid/numpy,KaelChen/numpy,yiakwy/numpy,ChristopherHogan/numpy,MichaelAquilina/numpy,sigma-random/numpy,chatcannon/numpy,sinhrks/numpy,grlee77/numpy,dwillmer/numpy,SiccarPoint/numpy,gfyoung/numpy,pbrod/numpy,simongibbons/numpy,jorisvandenbossche/numpy,MSeifert04/numpy,ViralLeadership/numpy,yiakwy/numpy,njase/numpy,yiakwy/numpy,jorisvandenbossche/numpy,leifdenby/numpy,rhythmsosad/numpy,simongibbons/numpy,trankmichael/numpy,charris/numpy,brandon-rhodes/numpy,tdsmith/numpy,seberg/numpy,pdebuyl/numpy,numpy/numpy-refactor,rudimeier/numpy,anntzer/numpy,jorisvandenbossche/numpy,rajathkumarmp/numpy,musically-ut/numpy,grlee77/numpy,MSeifert04/numpy,bertrand-l/numpy,pyparallel/numpy,ajdawson/numpy,SunghanKim/numpy,KaelChen/numpy,jakirkham/numpy,chatcannon/numpy,kiwifb/numpy,pelson/numpy,Linkid/numpy,simongibbons/numpy,NextThought/pypy-numpy,madphysicist/numpy,MSeifert04/numpy,behzadnouri/numpy,shoyer/numpy,rherault-insa/numpy,WarrenWeckesser/numpy,ContinuumIO/numpy,embray/numpy,pdebuyl/numpy,kirillz
huravlev/numpy,utke1/numpy,larsmans/numpy,GrimDerp/numpy,immerrr/numpy,stefanv/numpy,dwf/numpy,sonnyhu/numpy,ewmoore/numpy,mhvk/numpy,ChanderG/numpy,hainm/numpy,SiccarPoint/numpy,jankoslavic/numpy,behzadnouri/numpy,musically-ut/numpy,mortada/numpy,pbrod/numpy,hainm/numpy,Yusa95/numpy,pizzathief/numpy,embray/numpy,leifdenby/numpy,stuarteberg/numpy,groutr/numpy,sigma-random/numpy,jankoslavic/numpy,solarjoe/numpy,moreati/numpy,pelson/numpy,immerrr/numpy,dimasad/numpy,endolith/numpy,hainm/numpy,larsmans/numpy,tynn/numpy,SunghanKim/numpy,trankmichael/numpy,nbeaver/numpy,pbrod/numpy,ChristopherHogan/numpy,ajdawson/numpy,sigma-random/numpy,dch312/numpy,tdsmith/numpy,stuarteberg/numpy,brandon-rhodes/numpy,has2k1/numpy,MichaelAquilina/numpy,mhvk/numpy,mingwpy/numpy,pelson/numpy,tynn/numpy,rgommers/numpy,felipebetancur/numpy,dato-code/numpy,rmcgibbo/numpy,WarrenWeckesser/numpy,WarrenWeckesser/numpy,rherault-insa/numpy,bringingheavendown/numpy,simongibbons/numpy,jakirkham/numpy,jorisvandenbossche/numpy,dwf/numpy,anntzer/numpy,jankoslavic/numpy,Yusa95/numpy,ajdawson/numpy,sinhrks/numpy,ogrisel/numpy,ssanderson/numpy,naritta/numpy,madphysicist/numpy,moreati/numpy,WarrenWeckesser/numpy,SunghanKim/numpy,numpy/numpy,jschueller/numpy,kirillzhuravlev/numpy,WillieMaddox/numpy,ssanderson/numpy,Eric89GXL/numpy,sinhrks/numpy,stuarteberg/numpy,SiccarPoint/numpy,endolith/numpy,tynn/numpy,maniteja123/numpy,BabeNovelty/numpy,skwbc/numpy,BabeNovelty/numpy,SiccarPoint/numpy,rudimeier/numpy,WarrenWeckesser/numpy,NextThought/pypy-numpy,trankmichael/numpy,dwf/numpy,sonnyhu/numpy,dch312/numpy,tacaswell/numpy,pbrod/numpy,ESSS/numpy,empeeu/numpy,larsmans/numpy,MSeifert04/numpy,maniteja123/numpy,bmorris3/numpy,gfyoung/numpy,mattip/numpy,jorisvandenbossche/numpy,ekalosak/numpy,moreati/numpy,pdebuyl/numpy,MichaelAquilina/numpy,kirillzhuravlev/numpy,ddasilva/numpy,Eric89GXL/numpy,matthew-brett/numpy,mortada/numpy,mathdd/numpy,empeeu/numpy,felipebetancur/numpy,Anwesh43/numpy,pdebuyl/numpy,ewmoore/numpy,b
ringingheavendown/numpy,behzadnouri/numpy,dato-code/numpy,astrofrog/numpy,Linkid/numpy,gmcastil/numpy,rhythmsosad/numpy,chiffa/numpy,endolith/numpy,stuarteberg/numpy,groutr/numpy,Linkid/numpy,GaZ3ll3/numpy,tacaswell/numpy,rajathkumarmp/numpy,jonathanunderwood/numpy,mindw/numpy,simongibbons/numpy,kirillzhuravlev/numpy,cowlicks/numpy,nguyentu1602/numpy,anntzer/numpy,njase/numpy,chiffa/numpy,pizzathief/numpy,shoyer/numpy,Srisai85/numpy,CMartelLML/numpy,githubmlai/numpy,WillieMaddox/numpy,ContinuumIO/numpy,ahaldane/numpy,mingwpy/numpy,madphysicist/numpy,naritta/numpy,sinhrks/numpy,KaelChen/numpy,stefanv/numpy,felipebetancur/numpy,yiakwy/numpy,cowlicks/numpy,MichaelAquilina/numpy,mindw/numpy,gmcastil/numpy,bmorris3/numpy,dch312/numpy,ahaldane/numpy,Yusa95/numpy,jonathanunderwood/numpy,grlee77/numpy,mathdd/numpy,sonnyhu/numpy,rgommers/numpy,rmcgibbo/numpy,tdsmith/numpy,astrofrog/numpy,sonnyhu/numpy,Srisai85/numpy,ahaldane/numpy,mattip/numpy,stefanv/numpy,shoyer/numpy,ChristopherHogan/numpy,ChanderG/numpy,rgommers/numpy,bmorris3/numpy,argriffing/numpy,mingwpy/numpy,pbrod/numpy,cjermain/numpy,pyparallel/numpy,anntzer/numpy,has2k1/numpy,b-carter/numpy,BMJHayward/numpy,stefanv/numpy,cowlicks/numpy,ChanderG/numpy,joferkington/numpy,skymanaditya1/numpy,dch312/numpy,BabeNovelty/numpy,matthew-brett/numpy,ddasilva/numpy,astrofrog/numpy,immerrr/numpy,jschueller/numpy,shoyer/numpy,grlee77/numpy,leifdenby/numpy,Eric89GXL/numpy,ChristopherHogan/numpy,ekalosak/numpy,stefanv/numpy,skymanaditya1/numpy,mattip/numpy,dimasad/numpy,ContinuumIO/numpy,njase/numpy,pizzathief/numpy,nguyentu1602/numpy
|
import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
Use parametric tests for format tests so that it is clearer which type is failing.
|
import numpy as np
from numpy.testing import *
def check_float_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(float(x)))
def test_float_types():
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
yield check_float_type, t
def check_complex_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(complex(x)))
assert_equal(str(tp(x*1j)), str(complex(x*1j)))
assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
def test_complex_types():
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
yield check_complex_type, t
if __name__ == "__main__":
run_module_suite()
|
<commit_before>import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
<commit_msg>Use parametric tests for format tests so that it is clearer which type is failing.<commit_after>
|
import numpy as np
from numpy.testing import *
def check_float_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(float(x)))
def test_float_types():
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
yield check_float_type, t
def check_complex_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(complex(x)))
assert_equal(str(tp(x*1j)), str(complex(x*1j)))
assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
def test_complex_types():
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
yield check_complex_type, t
if __name__ == "__main__":
run_module_suite()
|
import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
Use parametric tests for format tests so that it is clearer which type is failing.import numpy as np
from numpy.testing import *
def check_float_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(float(x)))
def test_float_types():
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
yield check_float_type, t
def check_complex_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(complex(x)))
assert_equal(str(tp(x*1j)), str(complex(x*1j)))
assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
def test_complex_types():
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
yield check_complex_type, t
if __name__ == "__main__":
run_module_suite()
|
<commit_before>import numpy as np
from numpy.testing import *
class TestPrint(TestCase):
def test_float_types(self) :
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(float(x)))
def test_complex_types(self) :
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(t(x)), str(complex(x)))
assert_equal(str(t(x*1j)), str(complex(x*1j)))
assert_equal(str(t(x + x*1j)), str(complex(x + x*1j)))
if __name__ == "__main__":
run_module_suite()
<commit_msg>Use parametric tests for format tests so that it is clearer which type is failing.<commit_after>import numpy as np
from numpy.testing import *
def check_float_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(float(x)))
def test_float_types():
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.float, np.double, np.longdouble] :
yield check_float_type, t
def check_complex_type(tp):
for x in [0, 1,-1, 1e10, 1e20] :
assert_equal(str(tp(x)), str(complex(x)))
assert_equal(str(tp(x*1j)), str(complex(x*1j)))
assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
def test_complex_types():
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble aren't the same as the
python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble] :
yield check_complex_type, t
if __name__ == "__main__":
run_module_suite()
|
32376bf577af51ed43819aa92e89231886e6b619
|
tests/functional/test_new_resolver_errors.py
|
tests/functional/test_new_resolver_errors.py
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)
|
Test for constraint in message
|
Test for constraint in message
|
Python
|
mit
|
pypa/pip,pradyunsg/pip,sbidoul/pip,sbidoul/pip,pypa/pip,pfmoore/pip,pfmoore/pip,pradyunsg/pip
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
Test for constraint in message
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)
|
<commit_before>from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
<commit_msg>Test for constraint in message<commit_after>
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
Test for constraint in messagefrom tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)
|
<commit_before>from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
<commit_msg>Test for constraint in message<commit_after>from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)
|
22cc4bc4ce8550554f931c6a95fef71b074ba07d
|
journal.py
|
journal.py
|
# -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=sazlin'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
@app.route('/')
def hello():
return u'Hello world!'
if __name__ == '__main__':
app.run(debug=True)
|
# -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
from flask import g
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=sazlin'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
def get_database_connection():
db = getattr(g, 'db', None)
if db is None:
g.db = db = connect_db()
return db
@app.teardown_request
def teardown_request(exception):
db = getattr(g, 'db', None)
if db is not None:
if exception and isinstance(exception, psycopg2.Error):
# if there was a problem with the database, rollback any
# existing transaction
db.rollback()
else:
# otherwise, commit
db.commit()
db.close()
@app.route('/')
def hello():
return u'Hello world!'
if __name__ == '__main__':
app.run(debug=True)
|
Add get_database_connection() and teardown_request() to handle getting and releasing a connnection.
|
Add get_database_connection() and teardown_request() to handle getting and releasing a connnection.
|
Python
|
mit
|
sazlin/learning_journal
|
# -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=sazlin'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
@app.route('/')
def hello():
return u'Hello world!'
if __name__ == '__main__':
app.run(debug=True)
Add get_database_connection() and teardown_request() to handle getting and releasing a connnection.
|
# -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
from flask import g
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=sazlin'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
def get_database_connection():
db = getattr(g, 'db', None)
if db is None:
g.db = db = connect_db()
return db
@app.teardown_request
def teardown_request(exception):
db = getattr(g, 'db', None)
if db is not None:
if exception and isinstance(exception, psycopg2.Error):
# if there was a problem with the database, rollback any
# existing transaction
db.rollback()
else:
# otherwise, commit
db.commit()
db.close()
@app.route('/')
def hello():
return u'Hello world!'
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before># -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=sazlin'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
@app.route('/')
def hello():
return u'Hello world!'
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Add get_database_connection() and teardown_request() to handle getting and releasing a connnection.<commit_after>
|
# -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
from flask import g
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=sazlin'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
def get_database_connection():
db = getattr(g, 'db', None)
if db is None:
g.db = db = connect_db()
return db
@app.teardown_request
def teardown_request(exception):
db = getattr(g, 'db', None)
if db is not None:
if exception and isinstance(exception, psycopg2.Error):
# if there was a problem with the database, rollback any
# existing transaction
db.rollback()
else:
# otherwise, commit
db.commit()
db.close()
@app.route('/')
def hello():
return u'Hello world!'
if __name__ == '__main__':
app.run(debug=True)
|
# -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=sazlin'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
@app.route('/')
def hello():
return u'Hello world!'
if __name__ == '__main__':
app.run(debug=True)
Add get_database_connection() and teardown_request() to handle getting and releasing a connnection.# -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
from flask import g
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=sazlin'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
def get_database_connection():
db = getattr(g, 'db', None)
if db is None:
g.db = db = connect_db()
return db
@app.teardown_request
def teardown_request(exception):
db = getattr(g, 'db', None)
if db is not None:
if exception and isinstance(exception, psycopg2.Error):
# if there was a problem with the database, rollback any
# existing transaction
db.rollback()
else:
# otherwise, commit
db.commit()
db.close()
@app.route('/')
def hello():
return u'Hello world!'
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before># -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=sazlin'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
@app.route('/')
def hello():
return u'Hello world!'
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Add get_database_connection() and teardown_request() to handle getting and releasing a connnection.<commit_after># -*- coding: utf-8 -*-
from flask import Flask
import os
import psycopg2
from contextlib import closing
from flask import g
DB_SCHEMA = """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app = Flask(__name__)
app.config['DATABASE'] = os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=sazlin'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
def get_database_connection():
db = getattr(g, 'db', None)
if db is None:
g.db = db = connect_db()
return db
@app.teardown_request
def teardown_request(exception):
db = getattr(g, 'db', None)
if db is not None:
if exception and isinstance(exception, psycopg2.Error):
# if there was a problem with the database, rollback any
# existing transaction
db.rollback()
else:
# otherwise, commit
db.commit()
db.close()
@app.route('/')
def hello():
return u'Hello world!'
if __name__ == '__main__':
app.run(debug=True)
|
4e096718efc41602023d1e2833467d158d2a0456
|
oscar_sagepay/dashboard/app.py
|
oscar_sagepay/dashboard/app.py
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from oscar.apps.dashboard.nav import register, Node
from . import views
node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
Add dashboard node for transaction list
|
Add dashboard node for transaction list
|
Python
|
bsd-3-clause
|
django-oscar/django-oscar-sagepay-direct
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
Add dashboard node for transaction list
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from oscar.apps.dashboard.nav import register, Node
from . import views
node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
<commit_before>from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
<commit_msg>Add dashboard node for transaction list<commit_after>
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from oscar.apps.dashboard.nav import register, Node
from . import views
node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
Add dashboard node for transaction listfrom django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from oscar.apps.dashboard.nav import register, Node
from . import views
node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
<commit_before>from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from . import views
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
<commit_msg>Add dashboard node for transaction list<commit_after>from django.conf.urls import patterns, url
from django.contrib.admin.views.decorators import staff_member_required
from oscar.core.application import Application
from oscar.apps.dashboard.nav import register, Node
from . import views
node = Node('Datacash', 'sagepay-transaction-list')
register(node, 100)
class SagepayDashboard(Application):
name = None
list_view = views.Transactions
detail_view = views.Transaction
def get_urls(self):
urlpatterns = patterns('',
url(r'^transactions/$', self.list_view.as_view(),
name='sagepay-transaction-list'),
url(r'^transactions/(?P<pk>\d+)/$', self.detail_view.as_view(),
name='sagepay-transaction-detail'),
)
return self.post_process_urls(urlpatterns)
def get_url_decorator(self, url_name):
return staff_member_required
application = SagepayDashboard()
|
ca5453672be43168bd7a53e1b4d63c2c53ca7365
|
marconiclient/version.py
|
marconiclient/version.py
|
# Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pbr.version
version_info = pbr.version.VersionInfo('marconi')
|
# Copyright (c) 2013 Red Hat, Inc.
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pkg_resources
try:
# First, try to get our version out of PKG-INFO. If we're installed,
# this'll let us find our version without pulling in pbr. After all, if
# we're installed on a system, we're not in a Git-managed source tree, so
# pbr doesn't really buy us anything.
version_string = pkg_resources.get_provider(
pkg_resources.Requirement.parse('python-marconiclient')).version
except pkg_resources.DistributionNotFound:
# No PKG-INFO? We're probably running from a checkout, then. Let pbr do
# its thing to figure out a version number.
import pbr.version
version_string = str(pbr.version.VersionInfo('python-marconiclient'))
|
Make pbr only a build-time dependency.
|
Make pbr only a build-time dependency.
This lets you build python-marconiclient packages that don't require pbr
to be installed at all. You would need pbr on the machine running
rpmbuild / debuild, but not on the machines that install the packages.
Unfortunately, this does not make python-marconiclient able to be
installed via pip 0.3.1 on Lucid; you'll need to uninstall the system
python-pip package and install a new pip some other way. Given that pip
< 1.3 doesn't perform SSL certificate validation for pypi (trivial MITM
attack, anyone?), you'd probably want to get a new pip anyway.
Change-Id: I2fb160b6dd446f1fc72eed2371db1afcb00b9e52
|
Python
|
apache-2.0
|
openstack/python-zaqarclient
|
# Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pbr.version
version_info = pbr.version.VersionInfo('marconi')
Make pbr only a build-time dependency.
This lets you build python-marconiclient packages that don't require pbr
to be installed at all. You would need pbr on the machine running
rpmbuild / debuild, but not on the machines that install the packages.
Unfortunately, this does not make python-marconiclient able to be
installed via pip 0.3.1 on Lucid; you'll need to uninstall the system
python-pip package and install a new pip some other way. Given that pip
< 1.3 doesn't perform SSL certificate validation for pypi (trivial MITM
attack, anyone?), you'd probably want to get a new pip anyway.
Change-Id: I2fb160b6dd446f1fc72eed2371db1afcb00b9e52
|
# Copyright (c) 2013 Red Hat, Inc.
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pkg_resources
try:
# First, try to get our version out of PKG-INFO. If we're installed,
# this'll let us find our version without pulling in pbr. After all, if
# we're installed on a system, we're not in a Git-managed source tree, so
# pbr doesn't really buy us anything.
version_string = pkg_resources.get_provider(
pkg_resources.Requirement.parse('python-marconiclient')).version
except pkg_resources.DistributionNotFound:
# No PKG-INFO? We're probably running from a checkout, then. Let pbr do
# its thing to figure out a version number.
import pbr.version
version_string = str(pbr.version.VersionInfo('python-marconiclient'))
|
<commit_before># Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pbr.version
version_info = pbr.version.VersionInfo('marconi')
<commit_msg>Make pbr only a build-time dependency.
This lets you build python-marconiclient packages that don't require pbr
to be installed at all. You would need pbr on the machine running
rpmbuild / debuild, but not on the machines that install the packages.
Unfortunately, this does not make python-marconiclient able to be
installed via pip 0.3.1 on Lucid; you'll need to uninstall the system
python-pip package and install a new pip some other way. Given that pip
< 1.3 doesn't perform SSL certificate validation for pypi (trivial MITM
attack, anyone?), you'd probably want to get a new pip anyway.
Change-Id: I2fb160b6dd446f1fc72eed2371db1afcb00b9e52<commit_after>
|
# Copyright (c) 2013 Red Hat, Inc.
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pkg_resources
try:
# First, try to get our version out of PKG-INFO. If we're installed,
# this'll let us find our version without pulling in pbr. After all, if
# we're installed on a system, we're not in a Git-managed source tree, so
# pbr doesn't really buy us anything.
version_string = pkg_resources.get_provider(
pkg_resources.Requirement.parse('python-marconiclient')).version
except pkg_resources.DistributionNotFound:
# No PKG-INFO? We're probably running from a checkout, then. Let pbr do
# its thing to figure out a version number.
import pbr.version
version_string = str(pbr.version.VersionInfo('python-marconiclient'))
|
# Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pbr.version
version_info = pbr.version.VersionInfo('marconi')
Make pbr only a build-time dependency.
This lets you build python-marconiclient packages that don't require pbr
to be installed at all. You would need pbr on the machine running
rpmbuild / debuild, but not on the machines that install the packages.
Unfortunately, this does not make python-marconiclient able to be
installed via pip 0.3.1 on Lucid; you'll need to uninstall the system
python-pip package and install a new pip some other way. Given that pip
< 1.3 doesn't perform SSL certificate validation for pypi (trivial MITM
attack, anyone?), you'd probably want to get a new pip anyway.
Change-Id: I2fb160b6dd446f1fc72eed2371db1afcb00b9e52# Copyright (c) 2013 Red Hat, Inc.
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pkg_resources
try:
# First, try to get our version out of PKG-INFO. If we're installed,
# this'll let us find our version without pulling in pbr. After all, if
# we're installed on a system, we're not in a Git-managed source tree, so
# pbr doesn't really buy us anything.
version_string = pkg_resources.get_provider(
pkg_resources.Requirement.parse('python-marconiclient')).version
except pkg_resources.DistributionNotFound:
# No PKG-INFO? We're probably running from a checkout, then. Let pbr do
# its thing to figure out a version number.
import pbr.version
version_string = str(pbr.version.VersionInfo('python-marconiclient'))
|
<commit_before># Copyright (c) 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pbr.version
version_info = pbr.version.VersionInfo('marconi')
<commit_msg>Make pbr only a build-time dependency.
This lets you build python-marconiclient packages that don't require pbr
to be installed at all. You would need pbr on the machine running
rpmbuild / debuild, but not on the machines that install the packages.
Unfortunately, this does not make python-marconiclient able to be
installed via pip 0.3.1 on Lucid; you'll need to uninstall the system
python-pip package and install a new pip some other way. Given that pip
< 1.3 doesn't perform SSL certificate validation for pypi (trivial MITM
attack, anyone?), you'd probably want to get a new pip anyway.
Change-Id: I2fb160b6dd446f1fc72eed2371db1afcb00b9e52<commit_after># Copyright (c) 2013 Red Hat, Inc.
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pkg_resources
try:
# First, try to get our version out of PKG-INFO. If we're installed,
# this'll let us find our version without pulling in pbr. After all, if
# we're installed on a system, we're not in a Git-managed source tree, so
# pbr doesn't really buy us anything.
version_string = pkg_resources.get_provider(
pkg_resources.Requirement.parse('python-marconiclient')).version
except pkg_resources.DistributionNotFound:
# No PKG-INFO? We're probably running from a checkout, then. Let pbr do
# its thing to figure out a version number.
import pbr.version
version_string = str(pbr.version.VersionInfo('python-marconiclient'))
|
2bd449678d34187efdf3e4ca92daa28ea1d9fa48
|
imagemodal/mixins/fragment.py
|
imagemodal/mixins/fragment.py
|
"""
Mixin fragment/html behavior into XBlocks
"""
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
template = 'templates/' + template
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
|
"""
Mixin fragment/html behavior into XBlocks
"""
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
template = 'templates/' + template
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
|
Fix bug to make template optional
|
Fix bug to make template optional
|
Python
|
agpl-3.0
|
Stanford-Online/xblock-image-modal,Stanford-Online/xblock-image-modal,Stanford-Online/xblock-image-modal
|
"""
Mixin fragment/html behavior into XBlocks
"""
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
template = 'templates/' + template
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
Fix bug to make template optional
|
"""
Mixin fragment/html behavior into XBlocks
"""
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
template = 'templates/' + template
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
|
<commit_before>"""
Mixin fragment/html behavior into XBlocks
"""
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
template = 'templates/' + template
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
<commit_msg>Fix bug to make template optional<commit_after>
|
"""
Mixin fragment/html behavior into XBlocks
"""
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
template = 'templates/' + template
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
|
"""
Mixin fragment/html behavior into XBlocks
"""
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
template = 'templates/' + template
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
Fix bug to make template optional"""
Mixin fragment/html behavior into XBlocks
"""
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
template = 'templates/' + template
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
|
<commit_before>"""
Mixin fragment/html behavior into XBlocks
"""
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
template = 'templates/' + template
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
<commit_msg>Fix bug to make template optional<commit_after>"""
Mixin fragment/html behavior into XBlocks
"""
from __future__ import absolute_import
from django.template.context import Context
from xblock.fragment import Fragment
class XBlockFragmentBuilderMixin(object):
"""
Create a default XBlock fragment builder
"""
def build_fragment(
self,
template='',
context=None,
css=None,
js=None,
js_init=None,
):
"""
Creates a fragment for display.
"""
context = context or {}
css = css or []
js = js or []
rendered_template = ''
if template:
template = 'templates/' + template
rendered_template = self.loader.render_django_template(
template,
context=Context(context),
i18n_service=self.runtime.service(self, 'i18n'),
)
fragment = Fragment(rendered_template)
for item in css:
if item.startswith('/'):
url = item
else:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_css_url(url)
for item in js:
item = 'public/' + item
url = self.runtime.local_resource_url(self, item)
fragment.add_javascript_url(url)
if js_init:
fragment.initialize_js(js_init)
return fragment
|
af4e705138887f950ebdb3cef06dfab77d89a1b9
|
indra/util/plot_formatting.py
|
indra/util/plot_formatting.py
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import matplotlib
fontsize=7
def set_fig_params():
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble'] = [
'\\usepackage{helvet}',
'\\usepackage{sansmath}',
'\\sansmath',
'\\usepackage{underscore}',]
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'):
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position(yticks_position)
ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.labelpad = label_padding
ax.yaxis.labelpad = label_padding
ax.xaxis.label.set_size(fontsize)
ax.yaxis.label.set_size(fontsize)
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import matplotlib
fontsize=7
def set_fig_params():
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble'] = [
'\\usepackage{helvet}',
'\\usepackage{sansmath}',
'\\sansmath',
'\\usepackage{underscore}',]
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'):
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position(yticks_position)
ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.labelpad = label_padding
ax.yaxis.labelpad = label_padding
ax.xaxis.label.set_size(fontsize)
ax.yaxis.label.set_size(fontsize)
# 3-color swatch from colorbrewer2.org
GREEN = "#66C2A5"
ORANGE = "#FC8D62"
PURPLE = "#8DA0CB"
|
Add default colors to plot formatting
|
Add default colors to plot formatting
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,pvtodorov/indra,sorgerlab/indra,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,bgyori/indra,johnbachman/indra,bgyori/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra,sorgerlab/belpy
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import matplotlib
fontsize=7
def set_fig_params():
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble'] = [
'\\usepackage{helvet}',
'\\usepackage{sansmath}',
'\\sansmath',
'\\usepackage{underscore}',]
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'):
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position(yticks_position)
ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.labelpad = label_padding
ax.yaxis.labelpad = label_padding
ax.xaxis.label.set_size(fontsize)
ax.yaxis.label.set_size(fontsize)
Add default colors to plot formatting
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import matplotlib
fontsize=7
def set_fig_params():
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble'] = [
'\\usepackage{helvet}',
'\\usepackage{sansmath}',
'\\sansmath',
'\\usepackage{underscore}',]
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'):
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position(yticks_position)
ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.labelpad = label_padding
ax.yaxis.labelpad = label_padding
ax.xaxis.label.set_size(fontsize)
ax.yaxis.label.set_size(fontsize)
# 3-color swatch from colorbrewer2.org
GREEN = "#66C2A5"
ORANGE = "#FC8D62"
PURPLE = "#8DA0CB"
|
<commit_before>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import matplotlib
fontsize=7
def set_fig_params():
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble'] = [
'\\usepackage{helvet}',
'\\usepackage{sansmath}',
'\\sansmath',
'\\usepackage{underscore}',]
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'):
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position(yticks_position)
ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.labelpad = label_padding
ax.yaxis.labelpad = label_padding
ax.xaxis.label.set_size(fontsize)
ax.yaxis.label.set_size(fontsize)
<commit_msg>Add default colors to plot formatting<commit_after>
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import matplotlib
fontsize=7
def set_fig_params():
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble'] = [
'\\usepackage{helvet}',
'\\usepackage{sansmath}',
'\\sansmath',
'\\usepackage{underscore}',]
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'):
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position(yticks_position)
ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.labelpad = label_padding
ax.yaxis.labelpad = label_padding
ax.xaxis.label.set_size(fontsize)
ax.yaxis.label.set_size(fontsize)
# 3-color swatch from colorbrewer2.org
GREEN = "#66C2A5"
ORANGE = "#FC8D62"
PURPLE = "#8DA0CB"
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import matplotlib
fontsize=7
def set_fig_params():
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble'] = [
'\\usepackage{helvet}',
'\\usepackage{sansmath}',
'\\sansmath',
'\\usepackage{underscore}',]
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'):
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position(yticks_position)
ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.labelpad = label_padding
ax.yaxis.labelpad = label_padding
ax.xaxis.label.set_size(fontsize)
ax.yaxis.label.set_size(fontsize)
Add default colors to plot formattingfrom __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import matplotlib
fontsize=7
def set_fig_params():
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble'] = [
'\\usepackage{helvet}',
'\\usepackage{sansmath}',
'\\sansmath',
'\\usepackage{underscore}',]
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'):
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position(yticks_position)
ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.labelpad = label_padding
ax.yaxis.labelpad = label_padding
ax.xaxis.label.set_size(fontsize)
ax.yaxis.label.set_size(fontsize)
# 3-color swatch from colorbrewer2.org
GREEN = "#66C2A5"
ORANGE = "#FC8D62"
PURPLE = "#8DA0CB"
|
<commit_before>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import matplotlib
fontsize=7
def set_fig_params():
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble'] = [
'\\usepackage{helvet}',
'\\usepackage{sansmath}',
'\\sansmath',
'\\usepackage{underscore}',]
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'):
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position(yticks_position)
ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.labelpad = label_padding
ax.yaxis.labelpad = label_padding
ax.xaxis.label.set_size(fontsize)
ax.yaxis.label.set_size(fontsize)
<commit_msg>Add default colors to plot formatting<commit_after>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import matplotlib
fontsize=7
def set_fig_params():
matplotlib.rcParams['font.sans-serif'] = 'Arial'
matplotlib.rcParams['text.usetex'] = True
matplotlib.rcParams['text.latex.preamble'] = [
'\\usepackage{helvet}',
'\\usepackage{sansmath}',
'\\sansmath',
'\\usepackage{underscore}',]
def format_axis(ax, label_padding=2, tick_padding=0, yticks_position='left'):
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position(yticks_position)
ax.yaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.set_tick_params(which='both', direction='out', labelsize=fontsize,
pad=tick_padding, length=2, width=0.5)
ax.xaxis.labelpad = label_padding
ax.yaxis.labelpad = label_padding
ax.xaxis.label.set_size(fontsize)
ax.yaxis.label.set_size(fontsize)
# 3-color swatch from colorbrewer2.org
GREEN = "#66C2A5"
ORANGE = "#FC8D62"
PURPLE = "#8DA0CB"
|
d7e4bdc6979e3ada1e28ce01e3b3e12d4d197bcf
|
html_table_parser/__init__.py
|
html_table_parser/__init__.py
|
from .parser import HTMLTableParser
__author__ = 'Josua Schmid'
__version__ = '0.1.1'
__licence__ = 'GPLv3'
|
from .parser import HTMLTableParser
__author__ = 'Josua Schmid'
__version__ = '0.1.1'
__licence__ = 'AGPLv3'
|
Correct license in module meta information
|
Correct license in module meta information
|
Python
|
agpl-3.0
|
schmijos/html-table-parser-python3,schmijos/html-table-parser-python3
|
from .parser import HTMLTableParser
__author__ = 'Josua Schmid'
__version__ = '0.1.1'
__licence__ = 'GPLv3'
Correct license in module meta information
|
from .parser import HTMLTableParser
__author__ = 'Josua Schmid'
__version__ = '0.1.1'
__licence__ = 'AGPLv3'
|
<commit_before>from .parser import HTMLTableParser
__author__ = 'Josua Schmid'
__version__ = '0.1.1'
__licence__ = 'GPLv3'
<commit_msg>Correct license in module meta information<commit_after>
|
from .parser import HTMLTableParser
__author__ = 'Josua Schmid'
__version__ = '0.1.1'
__licence__ = 'AGPLv3'
|
from .parser import HTMLTableParser
__author__ = 'Josua Schmid'
__version__ = '0.1.1'
__licence__ = 'GPLv3'
Correct license in module meta informationfrom .parser import HTMLTableParser
__author__ = 'Josua Schmid'
__version__ = '0.1.1'
__licence__ = 'AGPLv3'
|
<commit_before>from .parser import HTMLTableParser
__author__ = 'Josua Schmid'
__version__ = '0.1.1'
__licence__ = 'GPLv3'
<commit_msg>Correct license in module meta information<commit_after>from .parser import HTMLTableParser
__author__ = 'Josua Schmid'
__version__ = '0.1.1'
__licence__ = 'AGPLv3'
|
09f38a9f13521c69be2e047628c74ed571f0d8de
|
ideascube/wsgi.py
|
ideascube/wsgi.py
|
"""
WSGI config for ideascube project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings")
os.environ.setdefault("STORAGE_ROOT", "/var/ideascube")
from django.core.management import call_command
call_command('migrate', '--noinput', '--verbosity=1', '--database=default')
call_command('migrate', '--noinput', '--verbosity=1', '--database=transient')
call_command('collectstatic', '--noinput', '--verbosity=1')
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
"""
WSGI config for ideascube project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings")
os.environ.setdefault("STORAGE_ROOT", "/var/ideascube")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from django.core.management import call_command
call_command('migrate', '--noinput', '--verbosity=1', '--database=default')
call_command('migrate', '--noinput', '--verbosity=1', '--database=transient')
call_command('collectstatic', '--noinput', '--verbosity=1')
|
Load the application before running commands
|
Load the application before running commands
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
"""
WSGI config for ideascube project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings")
os.environ.setdefault("STORAGE_ROOT", "/var/ideascube")
from django.core.management import call_command
call_command('migrate', '--noinput', '--verbosity=1', '--database=default')
call_command('migrate', '--noinput', '--verbosity=1', '--database=transient')
call_command('collectstatic', '--noinput', '--verbosity=1')
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
Load the application before running commands
|
"""
WSGI config for ideascube project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings")
os.environ.setdefault("STORAGE_ROOT", "/var/ideascube")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from django.core.management import call_command
call_command('migrate', '--noinput', '--verbosity=1', '--database=default')
call_command('migrate', '--noinput', '--verbosity=1', '--database=transient')
call_command('collectstatic', '--noinput', '--verbosity=1')
|
<commit_before>"""
WSGI config for ideascube project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings")
os.environ.setdefault("STORAGE_ROOT", "/var/ideascube")
from django.core.management import call_command
call_command('migrate', '--noinput', '--verbosity=1', '--database=default')
call_command('migrate', '--noinput', '--verbosity=1', '--database=transient')
call_command('collectstatic', '--noinput', '--verbosity=1')
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
<commit_msg>Load the application before running commands<commit_after>
|
"""
WSGI config for ideascube project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings")
os.environ.setdefault("STORAGE_ROOT", "/var/ideascube")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from django.core.management import call_command
call_command('migrate', '--noinput', '--verbosity=1', '--database=default')
call_command('migrate', '--noinput', '--verbosity=1', '--database=transient')
call_command('collectstatic', '--noinput', '--verbosity=1')
|
"""
WSGI config for ideascube project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings")
os.environ.setdefault("STORAGE_ROOT", "/var/ideascube")
from django.core.management import call_command
call_command('migrate', '--noinput', '--verbosity=1', '--database=default')
call_command('migrate', '--noinput', '--verbosity=1', '--database=transient')
call_command('collectstatic', '--noinput', '--verbosity=1')
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
Load the application before running commands"""
WSGI config for ideascube project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings")
os.environ.setdefault("STORAGE_ROOT", "/var/ideascube")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from django.core.management import call_command
call_command('migrate', '--noinput', '--verbosity=1', '--database=default')
call_command('migrate', '--noinput', '--verbosity=1', '--database=transient')
call_command('collectstatic', '--noinput', '--verbosity=1')
|
<commit_before>"""
WSGI config for ideascube project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings")
os.environ.setdefault("STORAGE_ROOT", "/var/ideascube")
from django.core.management import call_command
call_command('migrate', '--noinput', '--verbosity=1', '--database=default')
call_command('migrate', '--noinput', '--verbosity=1', '--database=transient')
call_command('collectstatic', '--noinput', '--verbosity=1')
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
<commit_msg>Load the application before running commands<commit_after>"""
WSGI config for ideascube project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.settings")
os.environ.setdefault("STORAGE_ROOT", "/var/ideascube")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from django.core.management import call_command
call_command('migrate', '--noinput', '--verbosity=1', '--database=default')
call_command('migrate', '--noinput', '--verbosity=1', '--database=transient')
call_command('collectstatic', '--noinput', '--verbosity=1')
|
c30898d785d131a8dc08d93fe4142acda5b34081
|
frappe/core/doctype/docfield/docfield.py
|
frappe/core/doctype/docfield/docfield.py
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
class DocField(Document):
pass
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class DocField(Document):
def get_link_doctype(self):
'''Returns the Link doctype for the docfield (if applicable)
if fieldtype is Link: Returns "options"
if fieldtype is Table MultiSelect: Returns "options" of the Link field in the Child Table
'''
if self.fieldtype == 'Link':
return self.options
if self.fieldtype == 'Table MultiSelect':
table_doctype = self.options
link_doctype = frappe.db.get_value('DocField', {
'fieldtype': 'Link',
'parenttype': 'DocType',
'parent': table_doctype,
'in_list_view': 1
}, 'options')
return link_doctype
|
Add get_link_doctype method in DocField
|
fix: Add get_link_doctype method in DocField
|
Python
|
mit
|
adityahase/frappe,saurabh6790/frappe,almeidapaulopt/frappe,mhbu50/frappe,yashodhank/frappe,almeidapaulopt/frappe,mhbu50/frappe,vjFaLk/frappe,adityahase/frappe,vjFaLk/frappe,vjFaLk/frappe,frappe/frappe,vjFaLk/frappe,saurabh6790/frappe,StrellaGroup/frappe,StrellaGroup/frappe,yashodhank/frappe,frappe/frappe,almeidapaulopt/frappe,frappe/frappe,mhbu50/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,mhbu50/frappe,saurabh6790/frappe,yashodhank/frappe,adityahase/frappe,saurabh6790/frappe,adityahase/frappe,yashodhank/frappe
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
class DocField(Document):
pass
fix: Add get_link_doctype method in DocField
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class DocField(Document):
def get_link_doctype(self):
'''Returns the Link doctype for the docfield (if applicable)
if fieldtype is Link: Returns "options"
if fieldtype is Table MultiSelect: Returns "options" of the Link field in the Child Table
'''
if self.fieldtype == 'Link':
return self.options
if self.fieldtype == 'Table MultiSelect':
table_doctype = self.options
link_doctype = frappe.db.get_value('DocField', {
'fieldtype': 'Link',
'parenttype': 'DocType',
'parent': table_doctype,
'in_list_view': 1
}, 'options')
return link_doctype
|
<commit_before># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
class DocField(Document):
pass
<commit_msg>fix: Add get_link_doctype method in DocField<commit_after>
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class DocField(Document):
def get_link_doctype(self):
'''Returns the Link doctype for the docfield (if applicable)
if fieldtype is Link: Returns "options"
if fieldtype is Table MultiSelect: Returns "options" of the Link field in the Child Table
'''
if self.fieldtype == 'Link':
return self.options
if self.fieldtype == 'Table MultiSelect':
table_doctype = self.options
link_doctype = frappe.db.get_value('DocField', {
'fieldtype': 'Link',
'parenttype': 'DocType',
'parent': table_doctype,
'in_list_view': 1
}, 'options')
return link_doctype
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
class DocField(Document):
pass
fix: Add get_link_doctype method in DocField# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class DocField(Document):
def get_link_doctype(self):
'''Returns the Link doctype for the docfield (if applicable)
if fieldtype is Link: Returns "options"
if fieldtype is Table MultiSelect: Returns "options" of the Link field in the Child Table
'''
if self.fieldtype == 'Link':
return self.options
if self.fieldtype == 'Table MultiSelect':
table_doctype = self.options
link_doctype = frappe.db.get_value('DocField', {
'fieldtype': 'Link',
'parenttype': 'DocType',
'parent': table_doctype,
'in_list_view': 1
}, 'options')
return link_doctype
|
<commit_before># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
class DocField(Document):
pass
<commit_msg>fix: Add get_link_doctype method in DocField<commit_after># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class DocField(Document):
def get_link_doctype(self):
'''Returns the Link doctype for the docfield (if applicable)
if fieldtype is Link: Returns "options"
if fieldtype is Table MultiSelect: Returns "options" of the Link field in the Child Table
'''
if self.fieldtype == 'Link':
return self.options
if self.fieldtype == 'Table MultiSelect':
table_doctype = self.options
link_doctype = frappe.db.get_value('DocField', {
'fieldtype': 'Link',
'parenttype': 'DocType',
'parent': table_doctype,
'in_list_view': 1
}, 'options')
return link_doctype
|
ff7ec23bcef13412ee4ad997843664bbb4ff3738
|
wedding/wsgi.py
|
wedding/wsgi.py
|
"""
WSGI config for wedding project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wedding.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
|
"""
WSGI config for wedding project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wedding.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(application)
|
Set DJANGO_SETTINGS_MODULE before importing DjangoWhiteNoise
|
[heroku] Set DJANGO_SETTINGS_MODULE before importing DjangoWhiteNoise
|
Python
|
mit
|
jbinney/wedding,jbinney/wedding,jbinney/wedding
|
"""
WSGI config for wedding project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wedding.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
[heroku] Set DJANGO_SETTINGS_MODULE before importing DjangoWhiteNoise
|
"""
WSGI config for wedding project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wedding.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(application)
|
<commit_before>"""
WSGI config for wedding project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wedding.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
<commit_msg>[heroku] Set DJANGO_SETTINGS_MODULE before importing DjangoWhiteNoise<commit_after>
|
"""
WSGI config for wedding project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wedding.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(application)
|
"""
WSGI config for wedding project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wedding.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
[heroku] Set DJANGO_SETTINGS_MODULE before importing DjangoWhiteNoise"""
WSGI config for wedding project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wedding.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(application)
|
<commit_before>"""
WSGI config for wedding project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wedding.settings")
application = get_wsgi_application()
application = DjangoWhiteNoise(application)
<commit_msg>[heroku] Set DJANGO_SETTINGS_MODULE before importing DjangoWhiteNoise<commit_after>"""
WSGI config for wedding project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wedding.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from whitenoise.django import DjangoWhiteNoise
application = DjangoWhiteNoise(application)
|
4601656b62d9bf6185cf99ebd3ee107d1c82ce9a
|
paver/tests/test_path.py
|
paver/tests/test_path.py
|
# -*- coding: utf-8 -*-
import paver.path
import sys
import os.path
def test_join_on_unicode_path():
merged = b'something/\xc3\xb6'.decode('utf-8') # there is ö after something
if not os.path.supports_unicode_filenames and sys.version_info[0] < 3:
merged = merged.encode('utf-8')
assert merged == os.path.join(paver.path.path('something'), (b'\xc3\xb6').decode('utf-8'))
|
# -*- coding: utf-8 -*-
import paver.path
import sys
import os.path
def test_join_on_unicode_path():
# This is why we should drop 2.5 asap :]
# b'' strings are not supported in 2.5, while u'' string are not supported in 3.2
# -- even syntactically, so if will not help you here
if sys.version_info[0] < 3:
expected = 'something/\xc3\xb6'
unicode_o = '\xc3\xb6'.decode('utf-8')
# path.py on py2 is inheriting from str instead of unicode under this
# circumstances, therefore we have to expect string
if os.path.supports_unicode_filenames:
expected.decode('utf-8')
else:
expected = 'something/ö'
unicode_o = 'ö'
assert expected == os.path.join(paver.path.path('something'), unicode_o)
|
Rewrite unicode join test to work with py25
|
Rewrite unicode join test to work with py25
|
Python
|
bsd-3-clause
|
nikolas/paver,cecedille1/paver,gregorynicholas/paver,phargogh/paver,gregorynicholas/paver,thedrow/paver,cecedille1/paver
|
# -*- coding: utf-8 -*-
import paver.path
import sys
import os.path
def test_join_on_unicode_path():
merged = b'something/\xc3\xb6'.decode('utf-8') # there is ö after something
if not os.path.supports_unicode_filenames and sys.version_info[0] < 3:
merged = merged.encode('utf-8')
assert merged == os.path.join(paver.path.path('something'), (b'\xc3\xb6').decode('utf-8'))
Rewrite unicode join test to work with py25
|
# -*- coding: utf-8 -*-
import paver.path
import sys
import os.path
def test_join_on_unicode_path():
# This is why we should drop 2.5 asap :]
# b'' strings are not supported in 2.5, while u'' string are not supported in 3.2
# -- even syntactically, so if will not help you here
if sys.version_info[0] < 3:
expected = 'something/\xc3\xb6'
unicode_o = '\xc3\xb6'.decode('utf-8')
# path.py on py2 is inheriting from str instead of unicode under this
# circumstances, therefore we have to expect string
if os.path.supports_unicode_filenames:
expected.decode('utf-8')
else:
expected = 'something/ö'
unicode_o = 'ö'
assert expected == os.path.join(paver.path.path('something'), unicode_o)
|
<commit_before># -*- coding: utf-8 -*-
import paver.path
import sys
import os.path
def test_join_on_unicode_path():
merged = b'something/\xc3\xb6'.decode('utf-8') # there is ö after something
if not os.path.supports_unicode_filenames and sys.version_info[0] < 3:
merged = merged.encode('utf-8')
assert merged == os.path.join(paver.path.path('something'), (b'\xc3\xb6').decode('utf-8'))
<commit_msg>Rewrite unicode join test to work with py25<commit_after>
|
# -*- coding: utf-8 -*-
import paver.path
import sys
import os.path
def test_join_on_unicode_path():
# This is why we should drop 2.5 asap :]
# b'' strings are not supported in 2.5, while u'' string are not supported in 3.2
# -- even syntactically, so if will not help you here
if sys.version_info[0] < 3:
expected = 'something/\xc3\xb6'
unicode_o = '\xc3\xb6'.decode('utf-8')
# path.py on py2 is inheriting from str instead of unicode under this
# circumstances, therefore we have to expect string
if os.path.supports_unicode_filenames:
expected.decode('utf-8')
else:
expected = 'something/ö'
unicode_o = 'ö'
assert expected == os.path.join(paver.path.path('something'), unicode_o)
|
# -*- coding: utf-8 -*-
import paver.path
import sys
import os.path
def test_join_on_unicode_path():
merged = b'something/\xc3\xb6'.decode('utf-8') # there is ö after something
if not os.path.supports_unicode_filenames and sys.version_info[0] < 3:
merged = merged.encode('utf-8')
assert merged == os.path.join(paver.path.path('something'), (b'\xc3\xb6').decode('utf-8'))
Rewrite unicode join test to work with py25# -*- coding: utf-8 -*-
import paver.path
import sys
import os.path
def test_join_on_unicode_path():
# This is why we should drop 2.5 asap :]
# b'' strings are not supported in 2.5, while u'' string are not supported in 3.2
# -- even syntactically, so if will not help you here
if sys.version_info[0] < 3:
expected = 'something/\xc3\xb6'
unicode_o = '\xc3\xb6'.decode('utf-8')
# path.py on py2 is inheriting from str instead of unicode under this
# circumstances, therefore we have to expect string
if os.path.supports_unicode_filenames:
expected.decode('utf-8')
else:
expected = 'something/ö'
unicode_o = 'ö'
assert expected == os.path.join(paver.path.path('something'), unicode_o)
|
<commit_before># -*- coding: utf-8 -*-
import paver.path
import sys
import os.path
def test_join_on_unicode_path():
merged = b'something/\xc3\xb6'.decode('utf-8') # there is ö after something
if not os.path.supports_unicode_filenames and sys.version_info[0] < 3:
merged = merged.encode('utf-8')
assert merged == os.path.join(paver.path.path('something'), (b'\xc3\xb6').decode('utf-8'))
<commit_msg>Rewrite unicode join test to work with py25<commit_after># -*- coding: utf-8 -*-
import paver.path
import sys
import os.path
def test_join_on_unicode_path():
# This is why we should drop 2.5 asap :]
# b'' strings are not supported in 2.5, while u'' string are not supported in 3.2
# -- even syntactically, so if will not help you here
if sys.version_info[0] < 3:
expected = 'something/\xc3\xb6'
unicode_o = '\xc3\xb6'.decode('utf-8')
# path.py on py2 is inheriting from str instead of unicode under this
# circumstances, therefore we have to expect string
if os.path.supports_unicode_filenames:
expected.decode('utf-8')
else:
expected = 'something/ö'
unicode_o = 'ö'
assert expected == os.path.join(paver.path.path('something'), unicode_o)
|
a284cd351e77938d2ba90e7b358b38b5afb4f4bb
|
fireplace/cards/wog/paladin.py
|
fireplace/cards/wog/paladin.py
|
from ..utils import *
##
# Minions
class OG_006:
"Vilefin Inquisitor"
play = Summon(CONTROLLER, "OG_006b")
class OG_006b:
"The Tidal Hand"
activate = Summon(CONTROLLER, "OG_006a")
class OG_221:
"Selfless Hero"
deathrattle = GiveDivineShield(RANDOM_FRIENDLY_MINION)
##
# Spells
##
# Weapons
class OG_222:
"Rallying Blade"
play = Buff(FRIENDLY_MINIONS + DIVINE_SHIELD, "OG_222e")
OG_222e = buff(+1, +1)
|
from ..utils import *
##
# Minions
class OG_006:
"Vilefin Inquisitor"
play = Summon(CONTROLLER, "OG_006b")
class OG_006b:
"The Tidal Hand"
activate = Summon(CONTROLLER, "OG_006a")
class OG_221:
"Selfless Hero"
deathrattle = GiveDivineShield(RANDOM_FRIENDLY_MINION)
##
# Spells
class OG_223:
"Divine Strength"
play = Buff(TARGET, "OG_223e")
OG_223e = buff(+1, +2)
class OG_273:
"Stand Against Darkness"
play = Summon(CONTROLLER, "CS2_101t") * 5
##
# Weapons
class OG_222:
"Rallying Blade"
play = Buff(FRIENDLY_MINIONS + DIVINE_SHIELD, "OG_222e")
OG_222e = buff(+1, +1)
|
Implement Divine Strength and Stand Against Darkness
|
Implement Divine Strength and Stand Against Darkness
|
Python
|
agpl-3.0
|
jleclanche/fireplace,beheh/fireplace,NightKev/fireplace
|
from ..utils import *
##
# Minions
class OG_006:
"Vilefin Inquisitor"
play = Summon(CONTROLLER, "OG_006b")
class OG_006b:
"The Tidal Hand"
activate = Summon(CONTROLLER, "OG_006a")
class OG_221:
"Selfless Hero"
deathrattle = GiveDivineShield(RANDOM_FRIENDLY_MINION)
##
# Spells
##
# Weapons
class OG_222:
"Rallying Blade"
play = Buff(FRIENDLY_MINIONS + DIVINE_SHIELD, "OG_222e")
OG_222e = buff(+1, +1)
Implement Divine Strength and Stand Against Darkness
|
from ..utils import *
##
# Minions
class OG_006:
"Vilefin Inquisitor"
play = Summon(CONTROLLER, "OG_006b")
class OG_006b:
"The Tidal Hand"
activate = Summon(CONTROLLER, "OG_006a")
class OG_221:
"Selfless Hero"
deathrattle = GiveDivineShield(RANDOM_FRIENDLY_MINION)
##
# Spells
class OG_223:
"Divine Strength"
play = Buff(TARGET, "OG_223e")
OG_223e = buff(+1, +2)
class OG_273:
"Stand Against Darkness"
play = Summon(CONTROLLER, "CS2_101t") * 5
##
# Weapons
class OG_222:
"Rallying Blade"
play = Buff(FRIENDLY_MINIONS + DIVINE_SHIELD, "OG_222e")
OG_222e = buff(+1, +1)
|
<commit_before>from ..utils import *
##
# Minions
class OG_006:
"Vilefin Inquisitor"
play = Summon(CONTROLLER, "OG_006b")
class OG_006b:
"The Tidal Hand"
activate = Summon(CONTROLLER, "OG_006a")
class OG_221:
"Selfless Hero"
deathrattle = GiveDivineShield(RANDOM_FRIENDLY_MINION)
##
# Spells
##
# Weapons
class OG_222:
"Rallying Blade"
play = Buff(FRIENDLY_MINIONS + DIVINE_SHIELD, "OG_222e")
OG_222e = buff(+1, +1)
<commit_msg>Implement Divine Strength and Stand Against Darkness<commit_after>
|
from ..utils import *
##
# Minions
class OG_006:
"Vilefin Inquisitor"
play = Summon(CONTROLLER, "OG_006b")
class OG_006b:
"The Tidal Hand"
activate = Summon(CONTROLLER, "OG_006a")
class OG_221:
"Selfless Hero"
deathrattle = GiveDivineShield(RANDOM_FRIENDLY_MINION)
##
# Spells
class OG_223:
"Divine Strength"
play = Buff(TARGET, "OG_223e")
OG_223e = buff(+1, +2)
class OG_273:
"Stand Against Darkness"
play = Summon(CONTROLLER, "CS2_101t") * 5
##
# Weapons
class OG_222:
"Rallying Blade"
play = Buff(FRIENDLY_MINIONS + DIVINE_SHIELD, "OG_222e")
OG_222e = buff(+1, +1)
|
from ..utils import *
##
# Minions
class OG_006:
"Vilefin Inquisitor"
play = Summon(CONTROLLER, "OG_006b")
class OG_006b:
"The Tidal Hand"
activate = Summon(CONTROLLER, "OG_006a")
class OG_221:
"Selfless Hero"
deathrattle = GiveDivineShield(RANDOM_FRIENDLY_MINION)
##
# Spells
##
# Weapons
class OG_222:
"Rallying Blade"
play = Buff(FRIENDLY_MINIONS + DIVINE_SHIELD, "OG_222e")
OG_222e = buff(+1, +1)
Implement Divine Strength and Stand Against Darknessfrom ..utils import *
##
# Minions
class OG_006:
"Vilefin Inquisitor"
play = Summon(CONTROLLER, "OG_006b")
class OG_006b:
"The Tidal Hand"
activate = Summon(CONTROLLER, "OG_006a")
class OG_221:
"Selfless Hero"
deathrattle = GiveDivineShield(RANDOM_FRIENDLY_MINION)
##
# Spells
class OG_223:
"Divine Strength"
play = Buff(TARGET, "OG_223e")
OG_223e = buff(+1, +2)
class OG_273:
"Stand Against Darkness"
play = Summon(CONTROLLER, "CS2_101t") * 5
##
# Weapons
class OG_222:
"Rallying Blade"
play = Buff(FRIENDLY_MINIONS + DIVINE_SHIELD, "OG_222e")
OG_222e = buff(+1, +1)
|
<commit_before>from ..utils import *
##
# Minions
class OG_006:
"Vilefin Inquisitor"
play = Summon(CONTROLLER, "OG_006b")
class OG_006b:
"The Tidal Hand"
activate = Summon(CONTROLLER, "OG_006a")
class OG_221:
"Selfless Hero"
deathrattle = GiveDivineShield(RANDOM_FRIENDLY_MINION)
##
# Spells
##
# Weapons
class OG_222:
"Rallying Blade"
play = Buff(FRIENDLY_MINIONS + DIVINE_SHIELD, "OG_222e")
OG_222e = buff(+1, +1)
<commit_msg>Implement Divine Strength and Stand Against Darkness<commit_after>from ..utils import *
##
# Minions
class OG_006:
"Vilefin Inquisitor"
play = Summon(CONTROLLER, "OG_006b")
class OG_006b:
"The Tidal Hand"
activate = Summon(CONTROLLER, "OG_006a")
class OG_221:
"Selfless Hero"
deathrattle = GiveDivineShield(RANDOM_FRIENDLY_MINION)
##
# Spells
class OG_223:
"Divine Strength"
play = Buff(TARGET, "OG_223e")
OG_223e = buff(+1, +2)
class OG_273:
"Stand Against Darkness"
play = Summon(CONTROLLER, "CS2_101t") * 5
##
# Weapons
class OG_222:
"Rallying Blade"
play = Buff(FRIENDLY_MINIONS + DIVINE_SHIELD, "OG_222e")
OG_222e = buff(+1, +1)
|
79460959472f44abaed3d03689f9d397a77399c7
|
apps/careeropportunity/forms.py
|
apps/careeropportunity/forms.py
|
from django import forms
from apps.careeropportunity.models import CareerOpportunity
class AddCareerOpportunityForm(forms.ModelForm):
description = forms.CharField(label='Beskrivelse', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Detaljert beskrivelse av karrieremuligheten'}))
ingress = forms.CharField(label='Ingress', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Kort ingress til karrieremuligheten'}))
class Meta:
model = CareerOpportunity
fields = ('company', 'title', 'ingress', 'description', 'start', 'end', 'featured')
|
from django import forms
from apps.careeropportunity.models import CareerOpportunity
class AddCareerOpportunityForm(forms.ModelForm):
title = forms.CharField(label='Tittel', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Tittel for karrieremuligheten'}))
ingress = forms.CharField(label='Ingress', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Kort ingress til karrieremuligheten (Max 250 tegn)'}))
description = forms.CharField(label='Beskrivelse', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Detaljert beskrivelse av karrieremuligheten'}))
start = forms.DateTimeField(label='Start-tid', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg start-tid'}))
end = forms.DateTimeField(label='Slutt-tid', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg slutt-tid'}))
deadline = forms.DateTimeField(label='Søknadsfrist', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg søknadsfrist'}))
class Meta:
model = CareerOpportunity
fields = ('company', 'title', 'ingress', 'description', 'start', 'end', 'featured', 'deadline', 'employment', 'location')
|
Add inputfields for new attributes on careeropportunities and placeholdertext
|
Add inputfields for new attributes on careeropportunities and placeholdertext
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
from django import forms
from apps.careeropportunity.models import CareerOpportunity
class AddCareerOpportunityForm(forms.ModelForm):
description = forms.CharField(label='Beskrivelse', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Detaljert beskrivelse av karrieremuligheten'}))
ingress = forms.CharField(label='Ingress', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Kort ingress til karrieremuligheten'}))
class Meta:
model = CareerOpportunity
fields = ('company', 'title', 'ingress', 'description', 'start', 'end', 'featured')
Add inputfields for new attributes on careeropportunities and placeholdertext
|
from django import forms
from apps.careeropportunity.models import CareerOpportunity
class AddCareerOpportunityForm(forms.ModelForm):
title = forms.CharField(label='Tittel', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Tittel for karrieremuligheten'}))
ingress = forms.CharField(label='Ingress', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Kort ingress til karrieremuligheten (Max 250 tegn)'}))
description = forms.CharField(label='Beskrivelse', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Detaljert beskrivelse av karrieremuligheten'}))
start = forms.DateTimeField(label='Start-tid', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg start-tid'}))
end = forms.DateTimeField(label='Slutt-tid', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg slutt-tid'}))
deadline = forms.DateTimeField(label='Søknadsfrist', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg søknadsfrist'}))
class Meta:
model = CareerOpportunity
fields = ('company', 'title', 'ingress', 'description', 'start', 'end', 'featured', 'deadline', 'employment', 'location')
|
<commit_before>from django import forms
from apps.careeropportunity.models import CareerOpportunity
class AddCareerOpportunityForm(forms.ModelForm):
description = forms.CharField(label='Beskrivelse', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Detaljert beskrivelse av karrieremuligheten'}))
ingress = forms.CharField(label='Ingress', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Kort ingress til karrieremuligheten'}))
class Meta:
model = CareerOpportunity
fields = ('company', 'title', 'ingress', 'description', 'start', 'end', 'featured')
<commit_msg>Add inputfields for new attributes on careeropportunities and placeholdertext<commit_after>
|
from django import forms
from apps.careeropportunity.models import CareerOpportunity
class AddCareerOpportunityForm(forms.ModelForm):
title = forms.CharField(label='Tittel', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Tittel for karrieremuligheten'}))
ingress = forms.CharField(label='Ingress', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Kort ingress til karrieremuligheten (Max 250 tegn)'}))
description = forms.CharField(label='Beskrivelse', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Detaljert beskrivelse av karrieremuligheten'}))
start = forms.DateTimeField(label='Start-tid', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg start-tid'}))
end = forms.DateTimeField(label='Slutt-tid', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg slutt-tid'}))
deadline = forms.DateTimeField(label='Søknadsfrist', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg søknadsfrist'}))
class Meta:
model = CareerOpportunity
fields = ('company', 'title', 'ingress', 'description', 'start', 'end', 'featured', 'deadline', 'employment', 'location')
|
from django import forms
from apps.careeropportunity.models import CareerOpportunity
class AddCareerOpportunityForm(forms.ModelForm):
description = forms.CharField(label='Beskrivelse', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Detaljert beskrivelse av karrieremuligheten'}))
ingress = forms.CharField(label='Ingress', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Kort ingress til karrieremuligheten'}))
class Meta:
model = CareerOpportunity
fields = ('company', 'title', 'ingress', 'description', 'start', 'end', 'featured')
Add inputfields for new attributes on careeropportunities and placeholdertextfrom django import forms
from apps.careeropportunity.models import CareerOpportunity
class AddCareerOpportunityForm(forms.ModelForm):
title = forms.CharField(label='Tittel', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Tittel for karrieremuligheten'}))
ingress = forms.CharField(label='Ingress', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Kort ingress til karrieremuligheten (Max 250 tegn)'}))
description = forms.CharField(label='Beskrivelse', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Detaljert beskrivelse av karrieremuligheten'}))
start = forms.DateTimeField(label='Start-tid', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg start-tid'}))
end = forms.DateTimeField(label='Slutt-tid', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg slutt-tid'}))
deadline = forms.DateTimeField(label='Søknadsfrist', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg søknadsfrist'}))
class Meta:
model = CareerOpportunity
fields = ('company', 'title', 'ingress', 'description', 'start', 'end', 'featured', 'deadline', 'employment', 'location')
|
<commit_before>from django import forms
from apps.careeropportunity.models import CareerOpportunity
class AddCareerOpportunityForm(forms.ModelForm):
description = forms.CharField(label='Beskrivelse', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Detaljert beskrivelse av karrieremuligheten'}))
ingress = forms.CharField(label='Ingress', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Kort ingress til karrieremuligheten'}))
class Meta:
model = CareerOpportunity
fields = ('company', 'title', 'ingress', 'description', 'start', 'end', 'featured')
<commit_msg>Add inputfields for new attributes on careeropportunities and placeholdertext<commit_after>from django import forms
from apps.careeropportunity.models import CareerOpportunity
class AddCareerOpportunityForm(forms.ModelForm):
title = forms.CharField(label='Tittel', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Tittel for karrieremuligheten'}))
ingress = forms.CharField(label='Ingress', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Kort ingress til karrieremuligheten (Max 250 tegn)'}))
description = forms.CharField(label='Beskrivelse', required=True, widget=forms.Textarea(
attrs={'placeholder': 'Detaljert beskrivelse av karrieremuligheten'}))
start = forms.DateTimeField(label='Start-tid', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg start-tid'}))
end = forms.DateTimeField(label='Slutt-tid', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg slutt-tid'}))
deadline = forms.DateTimeField(label='Søknadsfrist', required=True, widget=forms.TextInput(
attrs={'placeholder': 'Velg søknadsfrist'}))
class Meta:
model = CareerOpportunity
fields = ('company', 'title', 'ingress', 'description', 'start', 'end', 'featured', 'deadline', 'employment', 'location')
|
9be0a60ee69deda4a1efa82498169e3bdcc9e55a
|
froide/helper/api_renderers.py
|
froide/helper/api_renderers.py
|
'''
This needs it's own module due to import cycles
as it the class here is referenced in settings.
'''
from rest_framework_csv.renderers import PaginatedCSVRenderer
class CustomPaginatedCSVRenderer(PaginatedCSVRenderer):
"""
Our pagination has an objects level with additional facets
This renderer only renders results
"""
def render(self, data, *args, **kwargs):
if not isinstance(data, list):
data = data.get('objects', {}).get('results', [])
return super(PaginatedCSVRenderer, self).render(data, *args, **kwargs)
|
'''
This needs it's own module due to import cycles
as it the class here is referenced in settings.
'''
from rest_framework_csv.renderers import PaginatedCSVRenderer
class CustomPaginatedCSVRenderer(PaginatedCSVRenderer):
"""
Our pagination has an objects level with additional facets
This renderer only renders results
"""
def render(self, data, *args, **kwargs):
if not isinstance(data, list):
data = data.get('objects', {})
if not isinstance(data, list):
data = data.get('results', [])
return super(PaginatedCSVRenderer, self).render(data, *args, **kwargs)
|
Fix error on different result structure rendering
|
Fix error on different result structure rendering
|
Python
|
mit
|
fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide
|
'''
This needs it's own module due to import cycles
as it the class here is referenced in settings.
'''
from rest_framework_csv.renderers import PaginatedCSVRenderer
class CustomPaginatedCSVRenderer(PaginatedCSVRenderer):
"""
Our pagination has an objects level with additional facets
This renderer only renders results
"""
def render(self, data, *args, **kwargs):
if not isinstance(data, list):
data = data.get('objects', {}).get('results', [])
return super(PaginatedCSVRenderer, self).render(data, *args, **kwargs)
Fix error on different result structure rendering
|
'''
This needs it's own module due to import cycles
as it the class here is referenced in settings.
'''
from rest_framework_csv.renderers import PaginatedCSVRenderer
class CustomPaginatedCSVRenderer(PaginatedCSVRenderer):
"""
Our pagination has an objects level with additional facets
This renderer only renders results
"""
def render(self, data, *args, **kwargs):
if not isinstance(data, list):
data = data.get('objects', {})
if not isinstance(data, list):
data = data.get('results', [])
return super(PaginatedCSVRenderer, self).render(data, *args, **kwargs)
|
<commit_before>'''
This needs it's own module due to import cycles
as it the class here is referenced in settings.
'''
from rest_framework_csv.renderers import PaginatedCSVRenderer
class CustomPaginatedCSVRenderer(PaginatedCSVRenderer):
"""
Our pagination has an objects level with additional facets
This renderer only renders results
"""
def render(self, data, *args, **kwargs):
if not isinstance(data, list):
data = data.get('objects', {}).get('results', [])
return super(PaginatedCSVRenderer, self).render(data, *args, **kwargs)
<commit_msg>Fix error on different result structure rendering<commit_after>
|
'''
This needs it's own module due to import cycles
as it the class here is referenced in settings.
'''
from rest_framework_csv.renderers import PaginatedCSVRenderer
class CustomPaginatedCSVRenderer(PaginatedCSVRenderer):
"""
Our pagination has an objects level with additional facets
This renderer only renders results
"""
def render(self, data, *args, **kwargs):
if not isinstance(data, list):
data = data.get('objects', {})
if not isinstance(data, list):
data = data.get('results', [])
return super(PaginatedCSVRenderer, self).render(data, *args, **kwargs)
|
'''
This needs it's own module due to import cycles
as it the class here is referenced in settings.
'''
from rest_framework_csv.renderers import PaginatedCSVRenderer
class CustomPaginatedCSVRenderer(PaginatedCSVRenderer):
"""
Our pagination has an objects level with additional facets
This renderer only renders results
"""
def render(self, data, *args, **kwargs):
if not isinstance(data, list):
data = data.get('objects', {}).get('results', [])
return super(PaginatedCSVRenderer, self).render(data, *args, **kwargs)
Fix error on different result structure rendering'''
This needs it's own module due to import cycles
as it the class here is referenced in settings.
'''
from rest_framework_csv.renderers import PaginatedCSVRenderer
class CustomPaginatedCSVRenderer(PaginatedCSVRenderer):
"""
Our pagination has an objects level with additional facets
This renderer only renders results
"""
def render(self, data, *args, **kwargs):
if not isinstance(data, list):
data = data.get('objects', {})
if not isinstance(data, list):
data = data.get('results', [])
return super(PaginatedCSVRenderer, self).render(data, *args, **kwargs)
|
<commit_before>'''
This needs it's own module due to import cycles
as it the class here is referenced in settings.
'''
from rest_framework_csv.renderers import PaginatedCSVRenderer
class CustomPaginatedCSVRenderer(PaginatedCSVRenderer):
"""
Our pagination has an objects level with additional facets
This renderer only renders results
"""
def render(self, data, *args, **kwargs):
if not isinstance(data, list):
data = data.get('objects', {}).get('results', [])
return super(PaginatedCSVRenderer, self).render(data, *args, **kwargs)
<commit_msg>Fix error on different result structure rendering<commit_after>'''
This needs it's own module due to import cycles
as it the class here is referenced in settings.
'''
from rest_framework_csv.renderers import PaginatedCSVRenderer
class CustomPaginatedCSVRenderer(PaginatedCSVRenderer):
"""
Our pagination has an objects level with additional facets
This renderer only renders results
"""
def render(self, data, *args, **kwargs):
if not isinstance(data, list):
data = data.get('objects', {})
if not isinstance(data, list):
data = data.get('results', [])
return super(PaginatedCSVRenderer, self).render(data, *args, **kwargs)
|
e5a77f49eecd411d96e2826a789589bd09caae38
|
pywinauto/unittests/testall.py
|
pywinauto/unittests/testall.py
|
import unittest
import os.path
import os
import sys
sys.path.append(".")
#from pywinauto.timings import Timings
#Timings.Fast()
excludes = ['test_sendkeys']
def run_tests():
testfolder = os.path.abspath(os.path.split(__file__)[0])
sys.path.append(testfolder)
for root, dirs, files in os.walk(testfolder):
test_modules = [
file.replace('.py', '') for file in files if
file.startswith('test_') and
file.endswith('.py')]
test_modules = [mod for mod in test_modules if mod.lower() not in excludes]
for mod in test_modules:
#globals().update(__import__(mod, globals(), locals()).__dict__)
# import it
imported_mod = __import__(mod, globals(), locals())
#print imported_mod.__dict__
globals().update(imported_mod.__dict__)
#runner = unittest.TextTestRunner(verbosity = 2)
unittest.main()#testRunner = runner)
if __name__ == '__main__':
run_tests()
|
import os
import sys
import unittest
import coverage
# needs to be called before importing the modules
cov = coverage.coverage(branch = True)
cov.start()
testfolder = os.path.abspath(os.path.dirname(__file__))
package_root = os.path.abspath(os.path.join(testfolder, r"..\.."))
sys.path.append(package_root)
import pywinauto
modules_to_test = [pywinauto]
def run_tests():
excludes = ['test_sendkeys']
suite = unittest.TestSuite()
sys.path.append(testfolder)
for root, dirs, files in os.walk(testfolder):
test_modules = [
file.replace('.py', '') for file in files if
file.startswith('test_') and
file.endswith('.py')]
test_modules = [mod for mod in test_modules if mod.lower() not in excludes]
for mod in test_modules:
#globals().update(__import__(mod, globals(), locals()).__dict__)
# import it
imported_mod = __import__(mod, globals(), locals())
suite.addTests(
unittest.defaultTestLoader.loadTestsFromModule(imported_mod))
#unittest.main()#testRunner = runner)
#runner = unittest.TextTestRunner(verbosity = 2)
unittest.TextTestRunner(verbosity=1).run(suite)
cov.stop()
#print cov.analysis()
print cov.report()
cov.html_report(
directory = os.path.join(package_root, "Coverage_report"))
if __name__ == '__main__':
run_tests()
|
Synchronize testing module with BetterBatch one - and integrate Coverage reporting
|
Synchronize testing module with BetterBatch one - and integrate Coverage reporting
|
Python
|
lgpl-2.1
|
coandco/pywinauto,coandco/pywinauto,coandco/pywinauto
|
import unittest
import os.path
import os
import sys
sys.path.append(".")
#from pywinauto.timings import Timings
#Timings.Fast()
excludes = ['test_sendkeys']
def run_tests():
testfolder = os.path.abspath(os.path.split(__file__)[0])
sys.path.append(testfolder)
for root, dirs, files in os.walk(testfolder):
test_modules = [
file.replace('.py', '') for file in files if
file.startswith('test_') and
file.endswith('.py')]
test_modules = [mod for mod in test_modules if mod.lower() not in excludes]
for mod in test_modules:
#globals().update(__import__(mod, globals(), locals()).__dict__)
# import it
imported_mod = __import__(mod, globals(), locals())
#print imported_mod.__dict__
globals().update(imported_mod.__dict__)
#runner = unittest.TextTestRunner(verbosity = 2)
unittest.main()#testRunner = runner)
if __name__ == '__main__':
run_tests()Synchronize testing module with BetterBatch one - and integrate Coverage reporting
|
import os
import sys
import unittest
import coverage
# needs to be called before importing the modules
cov = coverage.coverage(branch = True)
cov.start()
testfolder = os.path.abspath(os.path.dirname(__file__))
package_root = os.path.abspath(os.path.join(testfolder, r"..\.."))
sys.path.append(package_root)
import pywinauto
modules_to_test = [pywinauto]
def run_tests():
excludes = ['test_sendkeys']
suite = unittest.TestSuite()
sys.path.append(testfolder)
for root, dirs, files in os.walk(testfolder):
test_modules = [
file.replace('.py', '') for file in files if
file.startswith('test_') and
file.endswith('.py')]
test_modules = [mod for mod in test_modules if mod.lower() not in excludes]
for mod in test_modules:
#globals().update(__import__(mod, globals(), locals()).__dict__)
# import it
imported_mod = __import__(mod, globals(), locals())
suite.addTests(
unittest.defaultTestLoader.loadTestsFromModule(imported_mod))
#unittest.main()#testRunner = runner)
#runner = unittest.TextTestRunner(verbosity = 2)
unittest.TextTestRunner(verbosity=1).run(suite)
cov.stop()
#print cov.analysis()
print cov.report()
cov.html_report(
directory = os.path.join(package_root, "Coverage_report"))
if __name__ == '__main__':
run_tests()
|
<commit_before>import unittest
import os.path
import os
import sys
sys.path.append(".")
#from pywinauto.timings import Timings
#Timings.Fast()
excludes = ['test_sendkeys']
def run_tests():
testfolder = os.path.abspath(os.path.split(__file__)[0])
sys.path.append(testfolder)
for root, dirs, files in os.walk(testfolder):
test_modules = [
file.replace('.py', '') for file in files if
file.startswith('test_') and
file.endswith('.py')]
test_modules = [mod for mod in test_modules if mod.lower() not in excludes]
for mod in test_modules:
#globals().update(__import__(mod, globals(), locals()).__dict__)
# import it
imported_mod = __import__(mod, globals(), locals())
#print imported_mod.__dict__
globals().update(imported_mod.__dict__)
#runner = unittest.TextTestRunner(verbosity = 2)
unittest.main()#testRunner = runner)
if __name__ == '__main__':
run_tests()<commit_msg>Synchronize testing module with BetterBatch one - and integrate Coverage reporting<commit_after>
|
import os
import sys
import unittest
import coverage
# needs to be called before importing the modules
cov = coverage.coverage(branch = True)
cov.start()
testfolder = os.path.abspath(os.path.dirname(__file__))
package_root = os.path.abspath(os.path.join(testfolder, r"..\.."))
sys.path.append(package_root)
import pywinauto
modules_to_test = [pywinauto]
def run_tests():
excludes = ['test_sendkeys']
suite = unittest.TestSuite()
sys.path.append(testfolder)
for root, dirs, files in os.walk(testfolder):
test_modules = [
file.replace('.py', '') for file in files if
file.startswith('test_') and
file.endswith('.py')]
test_modules = [mod for mod in test_modules if mod.lower() not in excludes]
for mod in test_modules:
#globals().update(__import__(mod, globals(), locals()).__dict__)
# import it
imported_mod = __import__(mod, globals(), locals())
suite.addTests(
unittest.defaultTestLoader.loadTestsFromModule(imported_mod))
#unittest.main()#testRunner = runner)
#runner = unittest.TextTestRunner(verbosity = 2)
unittest.TextTestRunner(verbosity=1).run(suite)
cov.stop()
#print cov.analysis()
print cov.report()
cov.html_report(
directory = os.path.join(package_root, "Coverage_report"))
if __name__ == '__main__':
run_tests()
|
import unittest
import os.path
import os
import sys
sys.path.append(".")
#from pywinauto.timings import Timings
#Timings.Fast()
excludes = ['test_sendkeys']
def run_tests():
testfolder = os.path.abspath(os.path.split(__file__)[0])
sys.path.append(testfolder)
for root, dirs, files in os.walk(testfolder):
test_modules = [
file.replace('.py', '') for file in files if
file.startswith('test_') and
file.endswith('.py')]
test_modules = [mod for mod in test_modules if mod.lower() not in excludes]
for mod in test_modules:
#globals().update(__import__(mod, globals(), locals()).__dict__)
# import it
imported_mod = __import__(mod, globals(), locals())
#print imported_mod.__dict__
globals().update(imported_mod.__dict__)
#runner = unittest.TextTestRunner(verbosity = 2)
unittest.main()#testRunner = runner)
if __name__ == '__main__':
run_tests()Synchronize testing module with BetterBatch one - and integrate Coverage reportingimport os
import sys
import unittest
import coverage
# needs to be called before importing the modules
cov = coverage.coverage(branch = True)
cov.start()
testfolder = os.path.abspath(os.path.dirname(__file__))
package_root = os.path.abspath(os.path.join(testfolder, r"..\.."))
sys.path.append(package_root)
import pywinauto
modules_to_test = [pywinauto]
def run_tests():
excludes = ['test_sendkeys']
suite = unittest.TestSuite()
sys.path.append(testfolder)
for root, dirs, files in os.walk(testfolder):
test_modules = [
file.replace('.py', '') for file in files if
file.startswith('test_') and
file.endswith('.py')]
test_modules = [mod for mod in test_modules if mod.lower() not in excludes]
for mod in test_modules:
#globals().update(__import__(mod, globals(), locals()).__dict__)
# import it
imported_mod = __import__(mod, globals(), locals())
suite.addTests(
unittest.defaultTestLoader.loadTestsFromModule(imported_mod))
#unittest.main()#testRunner = runner)
#runner = unittest.TextTestRunner(verbosity = 2)
unittest.TextTestRunner(verbosity=1).run(suite)
cov.stop()
#print cov.analysis()
print cov.report()
cov.html_report(
directory = os.path.join(package_root, "Coverage_report"))
if __name__ == '__main__':
run_tests()
|
<commit_before>import unittest
import os.path
import os
import sys
sys.path.append(".")
#from pywinauto.timings import Timings
#Timings.Fast()
excludes = ['test_sendkeys']
def run_tests():
testfolder = os.path.abspath(os.path.split(__file__)[0])
sys.path.append(testfolder)
for root, dirs, files in os.walk(testfolder):
test_modules = [
file.replace('.py', '') for file in files if
file.startswith('test_') and
file.endswith('.py')]
test_modules = [mod for mod in test_modules if mod.lower() not in excludes]
for mod in test_modules:
#globals().update(__import__(mod, globals(), locals()).__dict__)
# import it
imported_mod = __import__(mod, globals(), locals())
#print imported_mod.__dict__
globals().update(imported_mod.__dict__)
#runner = unittest.TextTestRunner(verbosity = 2)
unittest.main()#testRunner = runner)
if __name__ == '__main__':
run_tests()<commit_msg>Synchronize testing module with BetterBatch one - and integrate Coverage reporting<commit_after>import os
import sys
import unittest
import coverage
# needs to be called before importing the modules
cov = coverage.coverage(branch = True)
cov.start()
testfolder = os.path.abspath(os.path.dirname(__file__))
package_root = os.path.abspath(os.path.join(testfolder, r"..\.."))
sys.path.append(package_root)
import pywinauto
modules_to_test = [pywinauto]
def run_tests():
excludes = ['test_sendkeys']
suite = unittest.TestSuite()
sys.path.append(testfolder)
for root, dirs, files in os.walk(testfolder):
test_modules = [
file.replace('.py', '') for file in files if
file.startswith('test_') and
file.endswith('.py')]
test_modules = [mod for mod in test_modules if mod.lower() not in excludes]
for mod in test_modules:
#globals().update(__import__(mod, globals(), locals()).__dict__)
# import it
imported_mod = __import__(mod, globals(), locals())
suite.addTests(
unittest.defaultTestLoader.loadTestsFromModule(imported_mod))
#unittest.main()#testRunner = runner)
#runner = unittest.TextTestRunner(verbosity = 2)
unittest.TextTestRunner(verbosity=1).run(suite)
cov.stop()
#print cov.analysis()
print cov.report()
cov.html_report(
directory = os.path.join(package_root, "Coverage_report"))
if __name__ == '__main__':
run_tests()
|
d4cfe4c9d5ff680a85c25c144b077d928386811c
|
onetime/backends.py
|
onetime/backends.py
|
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.get(key=key)
if data is None:
None
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.filter(key=key)
if len(data) == 0:
return None
data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
Use filter() instead of get() since the key might be invalid
|
Use filter() instead of get() since the key might be invalid
|
Python
|
agpl-3.0
|
ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,vanschelven/cmsplugin-journal,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,uploadcare/django-loginurl,ISIFoundation/influenzanet-website,fajran/django-loginurl,ISIFoundation/influenzanet-website
|
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.get(key=key)
if data is None:
None
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Use filter() instead of get() since the key might be invalid
|
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.filter(key=key)
if len(data) == 0:
return None
data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
<commit_before>from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.get(key=key)
if data is None:
None
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Use filter() instead of get() since the key might be invalid<commit_after>
|
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.filter(key=key)
if len(data) == 0:
return None
data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.get(key=key)
if data is None:
None
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Use filter() instead of get() since the key might be invalidfrom django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.filter(key=key)
if len(data) == 0:
return None
data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
<commit_before>from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.get(key=key)
if data is None:
None
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Use filter() instead of get() since the key might be invalid<commit_after>from django.contrib.auth.models import User
from onetime.models import Key
class OneTimeBackend:
def authenticate(self, key):
data = Key.objects.filter(key=key)
if len(data) == 0:
return None
data = data[0]
if not data.is_valid():
return None
return data.user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
59d7c19f26d2907413e5ee4cb86cbd534e89135b
|
examples/livestream_datalogger.py
|
examples/livestream_datalogger.py
|
from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time, logging, traceback
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(1)
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, use_sd=False, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, d = i.datalogger_get_samples(timeout=5)
print("Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch))
except NoDataException as e:
# This will be raised if we try and get samples but the session has finished.
print(e)
except Exception as e:
print(traceback.format_exc())
finally:
i.datalogger_stop()
m.close()
|
from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
# 10Hz sample rate. The datalogger is actually just a mode of the Oscilloscope
# instrument in ROLL mode.
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
# Stop a previous session, if any, then start a new single-channel log in real
# time over the network.
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, samples = i.datalogger_get_samples()
print("Received samples %d to %d from channel %d" % (idx, idx + len(samples) - 1, ch))
except NoDataException:
print("Finished")
finally:
i.datalogger_stop()
m.close()
|
Simplify and clean the livestream data logger
|
PM-133: Simplify and clean the livestream data logger
|
Python
|
mit
|
liquidinstruments/pymoku
|
from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time, logging, traceback
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(1)
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, use_sd=False, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, d = i.datalogger_get_samples(timeout=5)
print("Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch))
except NoDataException as e:
# This will be raised if we try and get samples but the session has finished.
print(e)
except Exception as e:
print(traceback.format_exc())
finally:
i.datalogger_stop()
m.close()
PM-133: Simplify and clean the livestream data logger
|
from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
# 10Hz sample rate. The datalogger is actually just a mode of the Oscilloscope
# instrument in ROLL mode.
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
# Stop a previous session, if any, then start a new single-channel log in real
# time over the network.
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, samples = i.datalogger_get_samples()
print("Received samples %d to %d from channel %d" % (idx, idx + len(samples) - 1, ch))
except NoDataException:
print("Finished")
finally:
i.datalogger_stop()
m.close()
|
<commit_before>from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time, logging, traceback
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(1)
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, use_sd=False, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, d = i.datalogger_get_samples(timeout=5)
print("Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch))
except NoDataException as e:
# This will be raised if we try and get samples but the session has finished.
print(e)
except Exception as e:
print(traceback.format_exc())
finally:
i.datalogger_stop()
m.close()
<commit_msg>PM-133: Simplify and clean the livestream data logger<commit_after>
|
from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
# 10Hz sample rate. The datalogger is actually just a mode of the Oscilloscope
# instrument in ROLL mode.
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
# Stop a previous session, if any, then start a new single-channel log in real
# time over the network.
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, samples = i.datalogger_get_samples()
print("Received samples %d to %d from channel %d" % (idx, idx + len(samples) - 1, ch))
except NoDataException:
print("Finished")
finally:
i.datalogger_stop()
m.close()
|
from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time, logging, traceback
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(1)
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, use_sd=False, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, d = i.datalogger_get_samples(timeout=5)
print("Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch))
except NoDataException as e:
# This will be raised if we try and get samples but the session has finished.
print(e)
except Exception as e:
print(traceback.format_exc())
finally:
i.datalogger_stop()
m.close()
PM-133: Simplify and clean the livestream data loggerfrom pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
# 10Hz sample rate. The datalogger is actually just a mode of the Oscilloscope
# instrument in ROLL mode.
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
# Stop a previous session, if any, then start a new single-channel log in real
# time over the network.
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, samples = i.datalogger_get_samples()
print("Received samples %d to %d from channel %d" % (idx, idx + len(samples) - 1, ch))
except NoDataException:
print("Finished")
finally:
i.datalogger_stop()
m.close()
|
<commit_before>from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time, logging, traceback
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(1)
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, use_sd=False, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, d = i.datalogger_get_samples(timeout=5)
print("Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch))
except NoDataException as e:
# This will be raised if we try and get samples but the session has finished.
print(e)
except Exception as e:
print(traceback.format_exc())
finally:
i.datalogger_stop()
m.close()
<commit_msg>PM-133: Simplify and clean the livestream data logger<commit_after>from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
# 10Hz sample rate. The datalogger is actually just a mode of the Oscilloscope
# instrument in ROLL mode.
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
# Stop a previous session, if any, then start a new single-channel log in real
# time over the network.
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, samples = i.datalogger_get_samples()
print("Received samples %d to %d from channel %d" % (idx, idx + len(samples) - 1, ch))
except NoDataException:
print("Finished")
finally:
i.datalogger_stop()
m.close()
|
8c177eec8edd0006fd9a86ce7b9b91a28c536971
|
02_ganymede/ganymede/jupyter_notebook_config.py
|
02_ganymede/ganymede/jupyter_notebook_config.py
|
c.NotebookApp.server_extensions = [
'ganymede.ganymede',
'jupyter_nbgallery'
]
c.NotebookApp.allow_origin = 'https://nb.gallery'
from ganymede.ganymede import GanymedeHandler
import logstash
import os
if {"L41_LOGSTASH_HOST", "L41_LOGSTASH_PORT"} < set(os.environ):
GanymedeHandler.handlers = [
logstash.TCPLogstashHandler(
os.environ["L41_LOGSTASH_HOST"],
os.environ["L41_LOGSTASH_PORT"],
version=1,
)
]
|
c.NotebookApp.nbserver_extensions = {
'ganymede.ganymede': 'ganymede.ganymede',
'jupyter_nbgallery': 'jupyter_nbgallery'
}
c.NotebookApp.allow_origin = 'https://nb.gallery'
from ganymede.ganymede import GanymedeHandler
import logstash
import os
if {"L41_LOGSTASH_HOST", "L41_LOGSTASH_PORT"} < set(os.environ):
GanymedeHandler.handlers = [
logstash.TCPLogstashHandler(
os.environ["L41_LOGSTASH_HOST"],
os.environ["L41_LOGSTASH_PORT"],
version=1,
)
]
|
Change server_extensions to nbserver_extensions since server_extensions is deprecated.
|
Change server_extensions to nbserver_extensions since server_extensions is deprecated.
|
Python
|
apache-2.0
|
kylemvz/nbserver,agude/nbserver,kylemvz/nbserver,Lab41/nbserver,agude/nbserver,Lab41/nbserver
|
c.NotebookApp.server_extensions = [
'ganymede.ganymede',
'jupyter_nbgallery'
]
c.NotebookApp.allow_origin = 'https://nb.gallery'
from ganymede.ganymede import GanymedeHandler
import logstash
import os
if {"L41_LOGSTASH_HOST", "L41_LOGSTASH_PORT"} < set(os.environ):
GanymedeHandler.handlers = [
logstash.TCPLogstashHandler(
os.environ["L41_LOGSTASH_HOST"],
os.environ["L41_LOGSTASH_PORT"],
version=1,
)
]
Change server_extensions to nbserver_extensions since server_extensions is deprecated.
|
c.NotebookApp.nbserver_extensions = {
'ganymede.ganymede': 'ganymede.ganymede',
'jupyter_nbgallery': 'jupyter_nbgallery'
}
c.NotebookApp.allow_origin = 'https://nb.gallery'
from ganymede.ganymede import GanymedeHandler
import logstash
import os
if {"L41_LOGSTASH_HOST", "L41_LOGSTASH_PORT"} < set(os.environ):
GanymedeHandler.handlers = [
logstash.TCPLogstashHandler(
os.environ["L41_LOGSTASH_HOST"],
os.environ["L41_LOGSTASH_PORT"],
version=1,
)
]
|
<commit_before>c.NotebookApp.server_extensions = [
'ganymede.ganymede',
'jupyter_nbgallery'
]
c.NotebookApp.allow_origin = 'https://nb.gallery'
from ganymede.ganymede import GanymedeHandler
import logstash
import os
if {"L41_LOGSTASH_HOST", "L41_LOGSTASH_PORT"} < set(os.environ):
GanymedeHandler.handlers = [
logstash.TCPLogstashHandler(
os.environ["L41_LOGSTASH_HOST"],
os.environ["L41_LOGSTASH_PORT"],
version=1,
)
]
<commit_msg>Change server_extensions to nbserver_extensions since server_extensions is deprecated.<commit_after>
|
c.NotebookApp.nbserver_extensions = {
'ganymede.ganymede': 'ganymede.ganymede',
'jupyter_nbgallery': 'jupyter_nbgallery'
}
c.NotebookApp.allow_origin = 'https://nb.gallery'
from ganymede.ganymede import GanymedeHandler
import logstash
import os
if {"L41_LOGSTASH_HOST", "L41_LOGSTASH_PORT"} < set(os.environ):
GanymedeHandler.handlers = [
logstash.TCPLogstashHandler(
os.environ["L41_LOGSTASH_HOST"],
os.environ["L41_LOGSTASH_PORT"],
version=1,
)
]
|
c.NotebookApp.server_extensions = [
'ganymede.ganymede',
'jupyter_nbgallery'
]
c.NotebookApp.allow_origin = 'https://nb.gallery'
from ganymede.ganymede import GanymedeHandler
import logstash
import os
if {"L41_LOGSTASH_HOST", "L41_LOGSTASH_PORT"} < set(os.environ):
GanymedeHandler.handlers = [
logstash.TCPLogstashHandler(
os.environ["L41_LOGSTASH_HOST"],
os.environ["L41_LOGSTASH_PORT"],
version=1,
)
]
Change server_extensions to nbserver_extensions since server_extensions is deprecated.c.NotebookApp.nbserver_extensions = {
'ganymede.ganymede': 'ganymede.ganymede',
'jupyter_nbgallery': 'jupyter_nbgallery'
}
c.NotebookApp.allow_origin = 'https://nb.gallery'
from ganymede.ganymede import GanymedeHandler
import logstash
import os
if {"L41_LOGSTASH_HOST", "L41_LOGSTASH_PORT"} < set(os.environ):
GanymedeHandler.handlers = [
logstash.TCPLogstashHandler(
os.environ["L41_LOGSTASH_HOST"],
os.environ["L41_LOGSTASH_PORT"],
version=1,
)
]
|
<commit_before>c.NotebookApp.server_extensions = [
'ganymede.ganymede',
'jupyter_nbgallery'
]
c.NotebookApp.allow_origin = 'https://nb.gallery'
from ganymede.ganymede import GanymedeHandler
import logstash
import os
if {"L41_LOGSTASH_HOST", "L41_LOGSTASH_PORT"} < set(os.environ):
GanymedeHandler.handlers = [
logstash.TCPLogstashHandler(
os.environ["L41_LOGSTASH_HOST"],
os.environ["L41_LOGSTASH_PORT"],
version=1,
)
]
<commit_msg>Change server_extensions to nbserver_extensions since server_extensions is deprecated.<commit_after>c.NotebookApp.nbserver_extensions = {
'ganymede.ganymede': 'ganymede.ganymede',
'jupyter_nbgallery': 'jupyter_nbgallery'
}
c.NotebookApp.allow_origin = 'https://nb.gallery'
from ganymede.ganymede import GanymedeHandler
import logstash
import os
if {"L41_LOGSTASH_HOST", "L41_LOGSTASH_PORT"} < set(os.environ):
GanymedeHandler.handlers = [
logstash.TCPLogstashHandler(
os.environ["L41_LOGSTASH_HOST"],
os.environ["L41_LOGSTASH_PORT"],
version=1,
)
]
|
a353f35952fa85a2337f04378782eb806700238b
|
cairis/web_tests/CairisTests.py
|
cairis/web_tests/CairisTests.py
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from time import sleep
import unittest
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisTests(unittest.TestCase):
app = cairis.bin.cairisd.start(['-d', '--unit-test'])
sleep(1)
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from time import sleep
import unittest
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisTests(unittest.TestCase):
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
|
Call main rather than start
|
Call main rather than start
|
Python
|
apache-2.0
|
failys/CAIRIS,failys/CAIRIS,nathanbjenx/cairis,nathanbjenx/cairis,nathanbjenx/cairis,nathanbjenx/cairis,failys/CAIRIS
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from time import sleep
import unittest
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisTests(unittest.TestCase):
app = cairis.bin.cairisd.start(['-d', '--unit-test'])
sleep(1)
Call main rather than start
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from time import sleep
import unittest
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisTests(unittest.TestCase):
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
|
<commit_before># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from time import sleep
import unittest
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisTests(unittest.TestCase):
app = cairis.bin.cairisd.start(['-d', '--unit-test'])
sleep(1)
<commit_msg>Call main rather than start<commit_after>
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from time import sleep
import unittest
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisTests(unittest.TestCase):
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from time import sleep
import unittest
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisTests(unittest.TestCase):
app = cairis.bin.cairisd.start(['-d', '--unit-test'])
sleep(1)
Call main rather than start# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from time import sleep
import unittest
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisTests(unittest.TestCase):
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
|
<commit_before># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from time import sleep
import unittest
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisTests(unittest.TestCase):
app = cairis.bin.cairisd.start(['-d', '--unit-test'])
sleep(1)
<commit_msg>Call main rather than start<commit_after># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from time import sleep
import unittest
import cairis.bin.cairisd
__author__ = 'Robin Quetin'
class CairisTests(unittest.TestCase):
app = cairis.bin.cairisd.main(['-d', '--unit-test'])
sleep(1)
|
bf6a3e5633b53ee3d65946a4b5ca2427d8edcd16
|
samples/export_table_to_gcs.py
|
samples/export_table_to_gcs.py
|
# [START export_table_to_gcs]
def export_table(service, project_id, dataset_id, table_id, gcs_path):
"""starts a job which exports data from the specified table,
to the specified Google Cloud Storage file, returns a job resource"""
job_collection = service.jobs()
job_data = {
'projectId': project_id,
'configuration': {
'extract': {
'sourceTable': {
'projectId': project_id,
'datasetId': dataset_id,
'tableId': table_id,
},
'destinationUris': [gcs_path],
}
}
}
job_resource = job_collection.insert(
projectId=project_id,
body=job_data).execute()
# [END export_table_to_gcs]
return job_resource
|
# [START export_table_to_gcs]
from samples import auth
from samples import poll_job
def export_table(service, project_id, dataset_id, table_id, gcs_path):
"""starts a job which exports data from the specified table,
to the specified Google Cloud Storage file, returns a job resource"""
job_collection = service.jobs()
# [START job_data]
job_data = {
'projectId': project_id,
'configuration': {
'extract': {
'sourceTable': {
'projectId': project_id,
'datasetId': dataset_id,
'tableId': table_id,
},
'destinationUris': [gcs_path],
}
}
}
# [END job_data]
job_resource = job_collection.insert(
projectId=project_id,
body=job_data).execute()
return job_resource
def main():
project_id = raw_input("Choose your project ID: ")
dataset_id = raw_input("Choose a dataset ID: ")
table_id = raw_input("Choose a table name to copy: ")
gcs_path = raw_input("Enter a GCS URI: ")
bigquery = auth.get_service()
resource = export_table(bigquery, project_id, dataset_id, table_id, gcs_path)
poll_job(bigquery, job_resource)
print 'Done exporting!'
# [END export_table_to_gcs]
|
Update export example to be runnable.
|
Update export example to be runnable.
|
Python
|
apache-2.0
|
googlearchive/bigquery-samples-python,googlearchive/bigquery-samples-python
|
# [START export_table_to_gcs]
def export_table(service, project_id, dataset_id, table_id, gcs_path):
"""starts a job which exports data from the specified table,
to the specified Google Cloud Storage file, returns a job resource"""
job_collection = service.jobs()
job_data = {
'projectId': project_id,
'configuration': {
'extract': {
'sourceTable': {
'projectId': project_id,
'datasetId': dataset_id,
'tableId': table_id,
},
'destinationUris': [gcs_path],
}
}
}
job_resource = job_collection.insert(
projectId=project_id,
body=job_data).execute()
# [END export_table_to_gcs]
return job_resource
Update export example to be runnable.
|
# [START export_table_to_gcs]
from samples import auth
from samples import poll_job
def export_table(service, project_id, dataset_id, table_id, gcs_path):
"""starts a job which exports data from the specified table,
to the specified Google Cloud Storage file, returns a job resource"""
job_collection = service.jobs()
# [START job_data]
job_data = {
'projectId': project_id,
'configuration': {
'extract': {
'sourceTable': {
'projectId': project_id,
'datasetId': dataset_id,
'tableId': table_id,
},
'destinationUris': [gcs_path],
}
}
}
# [END job_data]
job_resource = job_collection.insert(
projectId=project_id,
body=job_data).execute()
return job_resource
def main():
project_id = raw_input("Choose your project ID: ")
dataset_id = raw_input("Choose a dataset ID: ")
table_id = raw_input("Choose a table name to copy: ")
gcs_path = raw_input("Enter a GCS URI: ")
bigquery = auth.get_service()
resource = export_table(bigquery, project_id, dataset_id, table_id, gcs_path)
poll_job(bigquery, job_resource)
print 'Done exporting!'
# [END export_table_to_gcs]
|
<commit_before># [START export_table_to_gcs]
def export_table(service, project_id, dataset_id, table_id, gcs_path):
"""starts a job which exports data from the specified table,
to the specified Google Cloud Storage file, returns a job resource"""
job_collection = service.jobs()
job_data = {
'projectId': project_id,
'configuration': {
'extract': {
'sourceTable': {
'projectId': project_id,
'datasetId': dataset_id,
'tableId': table_id,
},
'destinationUris': [gcs_path],
}
}
}
job_resource = job_collection.insert(
projectId=project_id,
body=job_data).execute()
# [END export_table_to_gcs]
return job_resource
<commit_msg>Update export example to be runnable.<commit_after>
|
# [START export_table_to_gcs]
from samples import auth
from samples import poll_job
def export_table(service, project_id, dataset_id, table_id, gcs_path):
"""starts a job which exports data from the specified table,
to the specified Google Cloud Storage file, returns a job resource"""
job_collection = service.jobs()
# [START job_data]
job_data = {
'projectId': project_id,
'configuration': {
'extract': {
'sourceTable': {
'projectId': project_id,
'datasetId': dataset_id,
'tableId': table_id,
},
'destinationUris': [gcs_path],
}
}
}
# [END job_data]
job_resource = job_collection.insert(
projectId=project_id,
body=job_data).execute()
return job_resource
def main():
project_id = raw_input("Choose your project ID: ")
dataset_id = raw_input("Choose a dataset ID: ")
table_id = raw_input("Choose a table name to copy: ")
gcs_path = raw_input("Enter a GCS URI: ")
bigquery = auth.get_service()
resource = export_table(bigquery, project_id, dataset_id, table_id, gcs_path)
poll_job(bigquery, job_resource)
print 'Done exporting!'
# [END export_table_to_gcs]
|
# [START export_table_to_gcs]
def export_table(service, project_id, dataset_id, table_id, gcs_path):
"""starts a job which exports data from the specified table,
to the specified Google Cloud Storage file, returns a job resource"""
job_collection = service.jobs()
job_data = {
'projectId': project_id,
'configuration': {
'extract': {
'sourceTable': {
'projectId': project_id,
'datasetId': dataset_id,
'tableId': table_id,
},
'destinationUris': [gcs_path],
}
}
}
job_resource = job_collection.insert(
projectId=project_id,
body=job_data).execute()
# [END export_table_to_gcs]
return job_resource
Update export example to be runnable.# [START export_table_to_gcs]
from samples import auth
from samples import poll_job
def export_table(service, project_id, dataset_id, table_id, gcs_path):
"""starts a job which exports data from the specified table,
to the specified Google Cloud Storage file, returns a job resource"""
job_collection = service.jobs()
# [START job_data]
job_data = {
'projectId': project_id,
'configuration': {
'extract': {
'sourceTable': {
'projectId': project_id,
'datasetId': dataset_id,
'tableId': table_id,
},
'destinationUris': [gcs_path],
}
}
}
# [END job_data]
job_resource = job_collection.insert(
projectId=project_id,
body=job_data).execute()
return job_resource
def main():
project_id = raw_input("Choose your project ID: ")
dataset_id = raw_input("Choose a dataset ID: ")
table_id = raw_input("Choose a table name to copy: ")
gcs_path = raw_input("Enter a GCS URI: ")
bigquery = auth.get_service()
resource = export_table(bigquery, project_id, dataset_id, table_id, gcs_path)
poll_job(bigquery, job_resource)
print 'Done exporting!'
# [END export_table_to_gcs]
|
<commit_before># [START export_table_to_gcs]
def export_table(service, project_id, dataset_id, table_id, gcs_path):
"""starts a job which exports data from the specified table,
to the specified Google Cloud Storage file, returns a job resource"""
job_collection = service.jobs()
job_data = {
'projectId': project_id,
'configuration': {
'extract': {
'sourceTable': {
'projectId': project_id,
'datasetId': dataset_id,
'tableId': table_id,
},
'destinationUris': [gcs_path],
}
}
}
job_resource = job_collection.insert(
projectId=project_id,
body=job_data).execute()
# [END export_table_to_gcs]
return job_resource
<commit_msg>Update export example to be runnable.<commit_after># [START export_table_to_gcs]
from samples import auth
from samples import poll_job
def export_table(service, project_id, dataset_id, table_id, gcs_path):
"""starts a job which exports data from the specified table,
to the specified Google Cloud Storage file, returns a job resource"""
job_collection = service.jobs()
# [START job_data]
job_data = {
'projectId': project_id,
'configuration': {
'extract': {
'sourceTable': {
'projectId': project_id,
'datasetId': dataset_id,
'tableId': table_id,
},
'destinationUris': [gcs_path],
}
}
}
# [END job_data]
job_resource = job_collection.insert(
projectId=project_id,
body=job_data).execute()
return job_resource
def main():
project_id = raw_input("Choose your project ID: ")
dataset_id = raw_input("Choose a dataset ID: ")
table_id = raw_input("Choose a table name to copy: ")
gcs_path = raw_input("Enter a GCS URI: ")
bigquery = auth.get_service()
resource = export_table(bigquery, project_id, dataset_id, table_id, gcs_path)
poll_job(bigquery, job_resource)
print 'Done exporting!'
# [END export_table_to_gcs]
|
12b6bf33205fbf09854e4e97c12ce62da992028a
|
renzongxian/0001/0001.py
|
renzongxian/0001/0001.py
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
使用 Python 如何生成 200 个激活码(或者优惠券)?
"""
import uuid
def generate_key():
key_list = []
for i in range(200):
uuid_key = uuid.uuid3(uuid.NAMESPACE_DNS, str(uuid.uuid1()))
key_list.append(str(uuid_key).replace('-', ''))
return key_list
if __name__ == '__main__':
print(generate_key())
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
使用 Python 如何生成 200 个激活码(或者优惠券)?
"""
import uuid
def generate_key():
key_list = []
for i in range(200):
uuid_key = uuid.uuid3(uuid.NAMESPACE_DNS, str(uuid.uuid1()))
key_list.append(str(uuid_key).replace('-', ''))
return key_list
if __name__ == '__main__':
print(generate_key())
|
Add a blank line in the end
|
Add a blank line in the end
|
Python
|
mit
|
agogear/python-1,Show-Me-the-Code/python,Pritesh242/python,merfii/PythonExercises,whix/python,xchaoinfo/python,tzq668766/python,Jaccorot/python,wangjun/python,zhakui/python,YGIronMan/python,luoxufeiyan/python,Friday21/python_show_me_the_code,Friday21/python_show_me_the_code,karnikamit/python,YGIronMan/python,ZuoGuocai/python,whix/python,Pritesh242/python,wangjun/python,zhakui/python,DIYgod/python,YGIronMan/python,haiyangd/python-show-me-the-code-,fairyzoro/python,ionutcipriananescu/python,ionutcipriananescu/python,yangzilong1986/python,Yrthgze/prueba-sourcetree2,EricSekyere/python,tzq668766/python,yangzilong1986/python,llluiop/python-1,ionutcipriananescu/python,xiaoixa/python,JiYouMCC/python,wangjun/python,zhenglaizhang/python,dominjune/python,tzq668766/python,Ph0enixxx/python,Show-Me-the-Code/python,whix/python,Show-Me-the-Code/python,hooting/show-me-the-code-python,keysona/python,DIYgod/python,llluiop/python-1,karnikamit/python,xchaoinfo/python,tzq668766/python,hooting/show-me-the-code-python,keysona/python,ZSeaPeng/python,DanielShangHai/python,Yrthgze/prueba-sourcetree2,Jaccorot/python,dominjune/python,ZSeaPeng/python,hooting/show-me-the-code-python,Supersuuu/python,snailwalker/python,Yrthgze/prueba-sourcetree2,Mark24Code/python,haiyangd/python-show-me-the-code-,karnikamit/python,ZuoGuocai/python,lz199144/python,merfii/PythonExercises,whix/python,agogear/python-1,xchaoinfo/python,Show-Me-the-Code/python,Jaccorot/python,llluiop/python-1,EricSekyere/python,zhakui/python,snailwalker/python,yangzilong1986/python,Supersuuu/python,luoxufeiyan/python,Jaccorot/python,merfii/PythonExercises,yangzilong1986/python,fairyzoro/python,karnikamit/python,DanielShangHai/python,tzq668766/python,haiyangd/python-show-me-the-code-,Supersuuu/python,starlightme/python,wangjun/python,llluiop/python-1,Friday21/python_show_me_the_code,starlightme/python,starlightme/python,sravaniaitha/python,fairyzoro/python,12wang3/python,12wang3/python,sravaniaitha/python,JiYouMCC/python,xiaoixa/python,J
iYouMCC/python,lz199144/python,Pritesh242/python,hooting/show-me-the-code-python,luoxufeiyan/python,fairyzoro/python,agogear/python-1,hooting/show-me-the-code-python,lz199144/python,lz199144/python,Pritesh242/python,lz199144/python,zhenglaizhang/python,Supersuuu/python,Ph0enixxx/python,Pritesh242/python,snailwalker/python,EricSekyere/python,xchaoinfo/python,12wang3/python,xchaoinfo/python,ZSeaPeng/python,Yrthgze/prueba-sourcetree2,renzongxian/Show-Me-the-Code,ZuoGuocai/python,zhenglaizhang/python,Yrthgze/prueba-sourcetree2,ionutcipriananescu/python,renzongxian/Show-Me-the-Code,sravaniaitha/python,fairyzoro/python,sravaniaitha/python,snailwalker/python,karnikamit/python,Show-Me-the-Code/python,DanielShangHai/python,dominjune/python,zhakui/python,Friday21/python_show_me_the_code,Friday21/python_show_me_the_code,DIYgod/python,DanielShangHai/python,DanielShangHai/python,JiYouMCC/python,renzongxian/Show-Me-the-Code,ionutcipriananescu/python,zhenglaizhang/python,EricSekyere/python,snailwalker/python,xiaoixa/python,haiyangd/python-show-me-the-code-,EricSekyere/python,JiYouMCC/python,zhenglaizhang/python,Mark24Code/python,llluiop/python-1,keysona/python,whix/python,haiyangd/python-show-me-the-code-,DIYgod/python,YGIronMan/python,luoxufeiyan/python,keysona/python,12wang3/python,dominjune/python,ZuoGuocai/python,ZuoGuocai/python,yangzilong1986/python,starlightme/python,Jaccorot/python,starlightme/python,Ph0enixxx/python,merfii/PythonExercises,Mark24Code/python,xiaoixa/python,Ph0enixxx/python,luoxufeiyan/python,keysona/python,Show-Me-the-Code/python,ZSeaPeng/python,ZSeaPeng/python,merfii/PythonExercises,wangjun/python,Ph0enixxx/python,Mark24Code/python,zhakui/python,renzongxian/Show-Me-the-Code,Mark24Code/python,renzongxian/Show-Me-the-Code,YGIronMan/python,Yrthgze/prueba-sourcetree2,agogear/python-1,sravaniaitha/python,xiaoixa/python,dominjune/python,agogear/python-1,12wang3/python
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
使用 Python 如何生成 200 个激活码(或者优惠券)?
"""
import uuid
def generate_key():
key_list = []
for i in range(200):
uuid_key = uuid.uuid3(uuid.NAMESPACE_DNS, str(uuid.uuid1()))
key_list.append(str(uuid_key).replace('-', ''))
return key_list
if __name__ == '__main__':
print(generate_key())
Add a blank line in the end
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
使用 Python 如何生成 200 个激活码(或者优惠券)?
"""
import uuid
def generate_key():
key_list = []
for i in range(200):
uuid_key = uuid.uuid3(uuid.NAMESPACE_DNS, str(uuid.uuid1()))
key_list.append(str(uuid_key).replace('-', ''))
return key_list
if __name__ == '__main__':
print(generate_key())
|
<commit_before># Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
使用 Python 如何生成 200 个激活码(或者优惠券)?
"""
import uuid
def generate_key():
key_list = []
for i in range(200):
uuid_key = uuid.uuid3(uuid.NAMESPACE_DNS, str(uuid.uuid1()))
key_list.append(str(uuid_key).replace('-', ''))
return key_list
if __name__ == '__main__':
print(generate_key())
<commit_msg>Add a blank line in the end<commit_after>
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
使用 Python 如何生成 200 个激活码(或者优惠券)?
"""
import uuid
def generate_key():
key_list = []
for i in range(200):
uuid_key = uuid.uuid3(uuid.NAMESPACE_DNS, str(uuid.uuid1()))
key_list.append(str(uuid_key).replace('-', ''))
return key_list
if __name__ == '__main__':
print(generate_key())
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
使用 Python 如何生成 200 个激活码(或者优惠券)?
"""
import uuid
def generate_key():
key_list = []
for i in range(200):
uuid_key = uuid.uuid3(uuid.NAMESPACE_DNS, str(uuid.uuid1()))
key_list.append(str(uuid_key).replace('-', ''))
return key_list
if __name__ == '__main__':
print(generate_key())
Add a blank line in the end# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
使用 Python 如何生成 200 个激活码(或者优惠券)?
"""
import uuid
def generate_key():
key_list = []
for i in range(200):
uuid_key = uuid.uuid3(uuid.NAMESPACE_DNS, str(uuid.uuid1()))
key_list.append(str(uuid_key).replace('-', ''))
return key_list
if __name__ == '__main__':
print(generate_key())
|
<commit_before># Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
使用 Python 如何生成 200 个激活码(或者优惠券)?
"""
import uuid
def generate_key():
key_list = []
for i in range(200):
uuid_key = uuid.uuid3(uuid.NAMESPACE_DNS, str(uuid.uuid1()))
key_list.append(str(uuid_key).replace('-', ''))
return key_list
if __name__ == '__main__':
print(generate_key())
<commit_msg>Add a blank line in the end<commit_after># Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
做为 Apple Store App 独立开发者,你要搞限时促销,为你的应用生成激活码(或者优惠券),
使用 Python 如何生成 200 个激活码(或者优惠券)?
"""
import uuid
def generate_key():
key_list = []
for i in range(200):
uuid_key = uuid.uuid3(uuid.NAMESPACE_DNS, str(uuid.uuid1()))
key_list.append(str(uuid_key).replace('-', ''))
return key_list
if __name__ == '__main__':
print(generate_key())
|
8eaa0f2fef26cc90e3aea5dea1253b7980400375
|
latest_tweets/templatetags/latest_tweets_tags.py
|
latest_tweets/templatetags/latest_tweets_tags.py
|
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
liked_by = kwargs.pop('liked_by', None)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if liked_by:
tweets = tweets.filter(like__user=liked_by)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
Add tag support for getting liked tweets
|
Add tag support for getting liked tweets
|
Python
|
bsd-3-clause
|
blancltd/django-latest-tweets
|
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
Add tag support for getting liked tweets
|
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
liked_by = kwargs.pop('liked_by', None)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if liked_by:
tweets = tweets.filter(like__user=liked_by)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
<commit_before>from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
<commit_msg>Add tag support for getting liked tweets<commit_after>
|
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
liked_by = kwargs.pop('liked_by', None)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if liked_by:
tweets = tweets.filter(like__user=liked_by)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
Add tag support for getting liked tweetsfrom django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
liked_by = kwargs.pop('liked_by', None)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if liked_by:
tweets = tweets.filter(like__user=liked_by)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
<commit_before>from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
<commit_msg>Add tag support for getting liked tweets<commit_after>from django import template
from latest_tweets.models import Tweet
register = template.Library()
@register.assignment_tag
def get_latest_tweets(*args, **kwargs):
limit = kwargs.pop('limit', None)
include_replies = kwargs.pop('include_replies', False)
liked_by = kwargs.pop('liked_by', None)
tweets = Tweet.objects.all()
# By default we exclude replies
if not include_replies:
tweets = tweets.exclude(is_reply=True)
if liked_by:
tweets = tweets.filter(like__user=liked_by)
if args:
tweets = tweets.filter(user__in=args)
if limit is not None:
tweets = tweets[:limit]
return tweets
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.